author     commit-bot@chromium.org <commit-bot@chromium.org@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-07-19 18:54:04 +0000
committer  commit-bot@chromium.org <commit-bot@chromium.org@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-07-19 18:54:04 +0000
commit     6645cde43d7dbf0df76dbda2c089d8f58560e3e2 (patch)
tree       1664f8f52c69f9a7ef75e4d94cfa716f67554c7a /tools
parent     fc70a4ae593aec6bbaeb1b6573627328b2ef2ad0 (diff)
Fix build problem with reverted issue 16948011
BUG=
R=caryclark@google.com

Author: sglez@google.com

Review URL: https://chromiumcodereview.appspot.com/19541009

git-svn-id: http://skia.googlecode.com/svn/trunk@10201 2bbb7eff-a529-9590-31e7-b0007b416f81
Diffstat (limited to 'tools')
-rw-r--r--  tools/bbh_shootout.cpp    385
-rw-r--r--  tools/lua/bbh_filter.lua  148
2 files changed, 533 insertions, 0 deletions
diff --git a/tools/bbh_shootout.cpp b/tools/bbh_shootout.cpp
new file mode 100644
index 0000000000..b0ef8528ad
--- /dev/null
+++ b/tools/bbh_shootout.cpp
@@ -0,0 +1,385 @@
+/*
+ * Copyright 2013 Google Inc.
+ *
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include "BenchTimer.h"
+#include "LazyDecodeBitmap.h"
+#include "PictureBenchmark.h"
+#include "PictureRenderer.h"
+#include "SkBenchmark.h"
+#include "SkForceLinking.h"
+#include "SkGraphics.h"
+#include "SkStream.h"
+#include "SkString.h"
+#include "TimerData.h"
+
+static const int kNumNormalRecordings = SkBENCHLOOP(10);
+static const int kNumRTreeRecordings = SkBENCHLOOP(10);
+static const int kNumPlaybacks = SkBENCHLOOP(4);
+static const size_t kNumBaseBenchmarks = 3;
+static const size_t kNumTileSizes = 3;
+static const size_t kNumBbhPlaybackBenchmarks = 3;
+static const size_t kNumBenchmarks = kNumBaseBenchmarks + kNumBbhPlaybackBenchmarks;
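+// Benchmark indices [0, kNumBaseBenchmarks) are the base benchmarks (normal
+// recording, rtree recording, normal playback); indices [kNumBaseBenchmarks,
+// kNumBenchmarks) are rtree playbacks, one per tile size in fTileSizes.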
+
+enum BenchmarkType {
+ kNormal_BenchmarkType = 0,
+ kRTree_BenchmarkType,
+};
+
+struct Histogram {
+ Histogram() {
+ // Make fCpuTime negative so that we don't mess with stats:
+ fCpuTime = SkIntToScalar(-1);
+ }
+ SkScalar fCpuTime;
+ SkString fPath;
+};
+
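+// Recording and playback benchmarks share this signature so that
+// BenchmarkControl can dispatch to either through a single function pointer.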
+typedef void (*BenchmarkFunction)
+ (BenchmarkType, const SkISize&, const SkString&, SkPicture*, BenchTimer*);
+
+// Defined below.
+static void benchmark_playback(
+ BenchmarkType, const SkISize&, const SkString&, SkPicture*, BenchTimer*);
+static void benchmark_recording(
+ BenchmarkType, const SkISize&, const SkString&, SkPicture*, BenchTimer*);
+
+/**
+ * Acts as a POD containing information needed to run a benchmark.
+ * Provides static methods to poll benchmark info from an index.
+ */
+struct BenchmarkControl {
+ SkISize fTileSize;
+ BenchmarkType fType;
+ BenchmarkFunction fFunction;
+ SkString fName;
+
+ /**
+     * Will construct a BenchmarkControl instance from an index between 0 and kNumBenchmarks.
+ */
+ static BenchmarkControl Make(size_t i) {
+ SkASSERT(kNumBenchmarks > i);
+ BenchmarkControl benchControl;
+ benchControl.fTileSize = getTileSize(i);
+ benchControl.fType = getBenchmarkType(i);
+ benchControl.fFunction = getBenchmarkFunc(i);
+ benchControl.fName = getBenchmarkName(i);
+ return benchControl;
+ }
+
+ enum BaseBenchmarks {
+ kNormalRecord = 0,
+ kRTreeRecord,
+ kNormalPlayback,
+ };
+
+ static SkISize fTileSizes[kNumTileSizes];
+
+ static SkISize getTileSize(size_t i) {
+        // Two of the base benchmarks don't need a tile size, but to keep things
+        // simple down the pipeline we leave a couple of values unused.
+ if (i < kNumBaseBenchmarks) {
+ return SkISize::Make(256, 256);
+ }
+ if (i >= kNumBaseBenchmarks && i < kNumBenchmarks) {
+ return fTileSizes[i - kNumBaseBenchmarks];
+ }
+ SkASSERT(0);
+ return SkISize::Make(0, 0);
+ }
+
+ static BenchmarkType getBenchmarkType(size_t i) {
+ if (i < kNumBaseBenchmarks) {
+ switch (i) {
+ case kNormalRecord:
+ return kNormal_BenchmarkType;
+ case kNormalPlayback:
+ return kNormal_BenchmarkType;
+ case kRTreeRecord:
+ return kRTree_BenchmarkType;
+ }
+ }
+ if (i < kNumBenchmarks) {
+ return kRTree_BenchmarkType;
+ }
+ SkASSERT(0);
+ return kRTree_BenchmarkType;
+ }
+
+ static BenchmarkFunction getBenchmarkFunc(size_t i) {
+ // Base functions.
+ switch (i) {
+ case kNormalRecord:
+ return benchmark_recording;
+ case kNormalPlayback:
+ return benchmark_playback;
+ case kRTreeRecord:
+ return benchmark_recording;
+ }
+ // RTree playbacks
+ if (i < kNumBenchmarks) {
+ return benchmark_playback;
+ }
+ SkASSERT(0);
+ return NULL;
+ }
+
+ static SkString getBenchmarkName(size_t i) {
+ // Base benchmark names
+ switch (i) {
+ case kNormalRecord:
+ return SkString("normal_recording");
+ case kNormalPlayback:
+ return SkString("normal_playback");
+ case kRTreeRecord:
+ return SkString("rtree_recording");
+ }
+ // RTree benchmark names.
+ if (i < kNumBenchmarks) {
+ SkASSERT(i >= kNumBaseBenchmarks);
+ SkString name;
+ name.printf("rtree_playback_%dx%d",
+ fTileSizes[i - kNumBaseBenchmarks].fWidth,
+ fTileSizes[i - kNumBaseBenchmarks].fHeight);
+ return name;
+
+ } else {
+ SkASSERT(0);
+ }
+ return SkString("");
+ }
+
+};
+
+SkISize BenchmarkControl::fTileSizes[kNumTileSizes] = {
+ SkISize::Make(256, 256),
+ SkISize::Make(512, 512),
+ SkISize::Make(1024, 1024),
+};
+
+static SkPicture* pic_from_path(const char path[]) {
+ SkFILEStream stream(path);
+ if (!stream.isValid()) {
+ SkDebugf("-- Can't open '%s'\n", path);
+ return NULL;
+ }
+ return SkPicture::CreateFromStream(&stream, &sk_tools::LazyDecodeBitmap);
+}
+
+/**
+ * This function is the sink to which all work ends up going.
+ * Renders the picture with the given renderer. It may or may not use an RTree.
+ * The renderer is chosen upstream. If we want to measure recording, we will
+ * use a RecordPictureRenderer. If we want to measure rendering, we will use a
+ * TiledPictureRenderer.
+ */
+static void do_benchmark_work(sk_tools::PictureRenderer* renderer,
+ int benchmarkType, const SkString& path, SkPicture* pic,
+ const int numRepeats, const char *msg, BenchTimer* timer) {
+ SkString msgPrefix;
+
+ switch (benchmarkType){
+ case kNormal_BenchmarkType:
+ msgPrefix.set("Normal");
+ renderer->setBBoxHierarchyType(sk_tools::PictureRenderer::kNone_BBoxHierarchyType);
+ break;
+ case kRTree_BenchmarkType:
+ msgPrefix.set("RTree");
+ renderer->setBBoxHierarchyType(sk_tools::PictureRenderer::kRTree_BBoxHierarchyType);
+ break;
+ default:
+ SkASSERT(0);
+ break;
+ }
+
+ renderer->init(pic);
+
+ /**
+ * If the renderer is not tiled, assume we are measuring recording.
+ */
+ bool isPlayback = (NULL != renderer->getTiledRenderer());
+
+ SkDebugf("%s %s %s %d times...\n", msgPrefix.c_str(), msg, path.c_str(), numRepeats);
+ for (int i = 0; i < numRepeats; ++i) {
+ renderer->setup();
+ // Render once to fill caches.
+ renderer->render(NULL);
+ // Render again to measure
+ timer->start();
+ bool result = renderer->render(NULL);
+ timer->end();
+        // We only care about a false result on playback. RecordPictureRenderer::render
+        // always returns false because we intentionally pass a NULL file name, which is fine.
+        if (isPlayback && !result) {
+ SkDebugf("Error rendering during playback.\n");
+ }
+ }
+ renderer->end();
+}
+
+/**
+ * Call do_benchmark_work with a tiled renderer using the default tile dimensions.
+ */
+static void benchmark_playback(
+ BenchmarkType benchmarkType, const SkISize& tileSize,
+ const SkString& path, SkPicture* pic, BenchTimer* timer) {
+ sk_tools::TiledPictureRenderer renderer;
+
+ SkString message("tiled_playback");
+ message.appendf("_%dx%d", tileSize.fWidth, tileSize.fHeight);
+ do_benchmark_work(&renderer, benchmarkType,
+ path, pic, kNumPlaybacks, message.c_str(), timer);
+}
+
+/**
+ * Call do_benchmark_work with a RecordPictureRenderer.
+ */
+static void benchmark_recording(
+ BenchmarkType benchmarkType, const SkISize& tileSize,
+ const SkString& path, SkPicture* pic, BenchTimer* timer) {
+ sk_tools::RecordPictureRenderer renderer;
+ int numRecordings = 0;
+ switch(benchmarkType) {
+ case kRTree_BenchmarkType:
+ numRecordings = kNumRTreeRecordings;
+ break;
+ case kNormal_BenchmarkType:
+ numRecordings = kNumNormalRecordings;
+ break;
+ }
+ do_benchmark_work(&renderer, benchmarkType, path, pic, numRecordings, "recording", timer);
+}
+
+/**
+ * Takes argc, argv along with one of the benchmark functions defined above.
+ * Loops over all skp files given on the command line and performs the
+ * measurement, storing each file's CPU time in the histogram.
+ *
+ * Returns false if the timer result cannot be parsed, true otherwise.
+ */
+static bool benchmark_loop(
+ int argc,
+ char **argv,
+ const BenchmarkControl& benchControl,
+ SkTArray<Histogram>& histogram) {
+
+ static const SkString timeFormat("%f");
+ TimerData timerData(timeFormat, timeFormat);
+ for (int index = 1; index < argc; ++index) {
+ BenchTimer timer;
+ SkString path(argv[index]);
+ SkAutoTUnref<SkPicture> pic(pic_from_path(path.c_str()));
+ if (NULL == pic) {
+ SkDebugf("Couldn't create picture. Ignoring path: %s\n", path.c_str());
+ continue;
+ }
+ benchControl.fFunction(benchControl.fType, benchControl.fTileSize, path, pic, &timer);
+ timerData.appendTimes(&timer, argc - 1 == index);
+
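+        // argv[0] is the program name, so the picture at argv[index] lands in
+        // histogram slot index - 1.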
+ histogram[index - 1].fPath = path;
+ histogram[index - 1].fCpuTime = SkDoubleToScalar(timer.fCpu);
+ }
+
+ const SkString timerResult = timerData.getResult(
+ /*logPerIter = */ false,
+ /*printMin = */ false,
+ /*repeatDraw = */ 1,
+ /*configName = */ benchControl.fName.c_str(),
+ /*showWallTime = */ false,
+ /*showTruncatedWallTime = */ false,
+ /*showCpuTime = */ true,
+ /*showTruncatedCpuTime = */ false,
+ /*showGpuTime = */ false);
+
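+    // The result string is assumed to contain something like "... cpu = 1.234";
+    // pull out the number that follows the "= " marker.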
+ const char findStr[] = "= ";
+ int pos = timerResult.find(findStr);
+ if (-1 == pos) {
+ SkDebugf("Unexpected output from TimerData::getResult(...). Unable to parse.");
+ return false;
+ }
+
+ SkScalar cpuTime = SkDoubleToScalar(atof(timerResult.c_str() + pos + sizeof(findStr) - 1));
+ if (cpuTime == 0) { // atof returns 0.0 on error.
+ SkDebugf("Unable to read value from timer result.\n");
+ return false;
+ }
+ return true;
+}
+
+static int tool_main(int argc, char** argv) {
+ SkAutoGraphics ag;
+ SkString usage;
+ usage.printf("Usage: filename [filename]*\n");
+
+ if (argc < 2) {
+ SkDebugf("%s\n", usage.c_str());
+ return -1;
+ }
+
+ static SkTArray<Histogram> histograms[kNumBenchmarks];
+
+ for (size_t i = 0; i < kNumBenchmarks; ++i) {
+ histograms[i].reset(argc - 1);
+ bool success = benchmark_loop(
+ argc, argv,
+ BenchmarkControl::Make(i),
+ histograms[i]);
+ if (!success) {
+ SkDebugf("benchmark_loop failed at index %d", i);
+ }
+ }
+
+    // Output gnuplot-readable histogram data.
+ const char* pbTitle = "bbh_shootout_playback.dat";
+ const char* recTitle = "bbh_shootout_record.dat";
+ SkFILEWStream playbackOut(pbTitle);
+ SkFILEWStream recordOut(recTitle);
+ recordOut.writeText("# ");
+ playbackOut.writeText("# ");
+ for (size_t i = 0; i < kNumBenchmarks; ++i) {
+ SkString out;
+ out.printf("%s ", BenchmarkControl::getBenchmarkName(i).c_str());
+ if (BenchmarkControl::getBenchmarkFunc(i) == &benchmark_recording) {
+ recordOut.writeText(out.c_str());
+ }
+ if (BenchmarkControl::getBenchmarkFunc(i) == &benchmark_playback) {
+ playbackOut.writeText(out.c_str());
+ }
+ }
+ recordOut.writeText("\n");
+ playbackOut.writeText("\n");
+
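+    // Each data row is the picture index followed by one CPU time per column
+    // named in the header line written above.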
+ for (int i = 0; i < argc - 1; ++i) {
+ SkString pbLine;
+ SkString recLine;
+ // ==== Write record info
+ recLine.printf("%d ", i);
+ recLine.appendf("%f ", histograms[0][i].fCpuTime); // Append normal_record time
+ recLine.appendf("%f", histograms[1][i].fCpuTime); // Append rtree_record time
+
+ // ==== Write playback info
+ pbLine.printf("%d ", i);
+ pbLine.appendf("%f ", histograms[2][i].fCpuTime); // Start with normal playback time.
+ // Append all playback benchmark times.
+ for (size_t j = kNumBbhPlaybackBenchmarks; j < kNumBenchmarks; ++j) {
+ pbLine.appendf("%f ", histograms[j][i].fCpuTime);
+ }
+ pbLine.remove(pbLine.size() - 1, 1); // Remove trailing space from line.
+ pbLine.appendf("\n");
+ recLine.appendf("\n");
+ playbackOut.writeText(pbLine.c_str());
+ recordOut.writeText(recLine.c_str());
+ }
+ SkDebugf("\nWrote data to gnuplot-readable files: %s %s\n", pbTitle, recTitle);
+
+ return 0;
+}
+
+int main(int argc, char** argv) {
+ return tool_main(argc, argv);
+}
+
diff --git a/tools/lua/bbh_filter.lua b/tools/lua/bbh_filter.lua
new file mode 100644
index 0000000000..73b530c7c7
--- /dev/null
+++ b/tools/lua/bbh_filter.lua
@@ -0,0 +1,148 @@
+-- bbh_filter.lua
+--
+-- This script outputs info about 'interesting' skp files,
+-- where the definition of 'interesting' changes but is roughly:
+-- "Interesting for bounding box hierarchy benchmarks."
+--
+-- Currently, the approach is to output, in equal amounts, the names of the files that
+-- have the most commands, and the names of the files that use the least popular commands.
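+--
+-- The sk_scrape_* functions below are callbacks, expected to be invoked once
+-- per skp by Skia's lua scraper harness (e.g. the lua_pictures tool).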
+
+function count_entries(table)
+ local count = 0
+ for _,_ in pairs(table) do
+ count = count + 1
+ end
+ return count
+end
+
+verbCounts = {}
+
+function reset_current()
+ -- Data about the skp in transit
+ currentInfo = {
+ fileName = '',
+ verbs = {},
+ numOps = 0
+ }
+end
+reset_current()
+
+numOutputFiles = 10 -- Default per metric; the call sites below pass explicit values.
+globalInfo = {} -- Saves currentInfo for each file to be used at the end.
+output = {} -- Stores {fileName, {verb, count}} tables.
+
+function tostr(t)
+ local str = ""
+ for k, v in next, t do
+ if #str > 0 then
+ str = str .. ", "
+ end
+ if type(k) == "number" then
+ str = str .. "[" .. k .. "] = "
+ else
+ str = str .. tostring(k) .. " = "
+ end
+ if type(v) == "table" then
+ str = str .. "{ " .. tostr(v) .. " }"
+ else
+ str = str .. tostring(v)
+ end
+ end
+ return str
+end
+
+function sk_scrape_startcanvas(c, fileName) end
+
+function sk_scrape_endcanvas(c, fileName)
+ globalInfo[fileName] = currentInfo
+ globalInfo[fileName].fileName = fileName
+ reset_current()
+end
+
+function sk_scrape_accumulate(t)
+    -- Count this verb both globally and for the current file, then nil out
+    -- t.verb so it doesn't show up in tostr().
+ verbCounts[t.verb] = (verbCounts[t.verb] or 0) + 1
+ currentInfo.verbs[t.verb] = (currentInfo.verbs[t.verb] or 0) + 1
+ currentInfo.numOps = currentInfo.numOps + 1
+
+ t.verb = nil
+end
+
+function sk_scrape_summarize()
+ verbWeights = {} -- {verb, weight}, where 0 < weight <= 1
+
+ meta = {}
+ for k,v in pairs(verbCounts) do
+ table.insert(meta, {key=k, value=v})
+ end
+ table.sort(meta, function (a,b) return a.value > b.value; end)
+ maxValue = meta[1].value
+ io.write("-- ==================\n")
+ io.write("------------------------------------------------------------------ \n")
+ io.write("-- Command\t\t\tNumber of calls\t\tPopularity\n")
+ io.write("------------------------------------------------------------------ \n")
+    for k, v in ipairs(meta) do
+ verbWeights[v.key] = v.value / maxValue
+
+ -- Poor man's formatting:
+ local padding = "\t\t\t"
+ if (#v.key + 3) < 8 then
+ padding = "\t\t\t\t"
+ end
+ if (#v.key + 3) >= 16 then
+ padding = "\t\t"
+ end
+
+ io.write ("-- ",v.key, padding, v.value, '\t\t\t', verbWeights[v.key], "\n")
+ end
+
+ meta = {}
+ function calculate_weight(verbs)
+ local weight = 0
+ for name, count in pairs(verbs) do
+ weight = weight + (1 / verbWeights[name]) * count
+ end
+ return weight
+ end
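+    -- For example, a verb half as popular as the most popular one has weight
+    -- 0.5, so each of its calls adds 1 / 0.5 = 2 to a file's weight; files
+    -- leaning on rare verbs come out "heavier".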
+ for n, info in pairs(globalInfo) do
+ table.insert(meta, info)
+ end
+
+ local visitedFiles = {}
+
+    -- Prints out information in a Lua-readable format.
+ function output_with_metric(metric_func, description, numOutputFiles)
+ table.sort(meta, metric_func)
+ print(description)
+ local iter = 0
+    for i, t in ipairs(meta) do
+ if not visitedFiles[t.fileName] then
+ visitedFiles[t.fileName] = true
+ io.write ("{\nname = \"", t.fileName, "\", \nverbs = {\n")
+ for verb,count in pairs(globalInfo[t.fileName].verbs) do
+ io.write(' ', verb, " = ", count, ",\n")
+ end
+ io.write("}\n},\n")
+
+ iter = iter + 1
+ if iter >= numOutputFiles then
+ break
+ end
+ end
+ end
+ end
+
+ output_with_metric(
+ function(a, b) return calculate_weight(a.verbs) > calculate_weight(b.verbs); end,
+ "\n-- ================== skps with calling unpopular commands.", 10)
+ output_with_metric(
+ function(a, b) return a.numOps > b.numOps; end,
+ "\n-- ================== skps with the most calls.", 50)
+
+ local count = count_entries(visitedFiles)
+
+ print ("-- Spat", count, "files")
+end
+