From d1201058bf73a0a5c2e19c0cc142082b1827461c Mon Sep 17 00:00:00 2001
From: cdalton <cdalton@nvidia.com>
Date: Mon, 5 Oct 2015 15:56:34 -0700
Subject: Make path range loading explicit

Requires the caller to explicitly preload paths within a range before
calling drawPaths. This allows us to remove the implicit lazy load,
thereby eliminating a redundant check on every redraw of a text blob.

BUG=skia:

Review URL: https://codereview.chromium.org/1382013002
---
 src/gpu/GrPathRange.h | 69 +++++++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 64 insertions(+), 5 deletions(-)

(limited to 'src/gpu/GrPathRange.h')

diff --git a/src/gpu/GrPathRange.h b/src/gpu/GrPathRange.h
index a2483c13c2..23b8beb226 100644
--- a/src/gpu/GrPathRange.h
+++ b/src/gpu/GrPathRange.h
@@ -9,10 +9,10 @@
 #define GrPathRange_DEFINED
 
 #include "GrGpuResource.h"
+#include "SkPath.h"
 #include "SkRefCnt.h"
 #include "SkTArray.h"
 
-class SkPath;
 class SkDescriptor;
 
 /**
@@ -70,7 +70,67 @@ public:
     int getNumPaths() const { return fNumPaths; }
     const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); }
 
+    void loadPathsIfNeeded(const void* indices, PathIndexType, int count) const;
+
+    template<typename IndexType> void loadPathsIfNeeded(const void* indices, int count) const {
+        if (!fPathGenerator) {
+            return;
+        }
+
+        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
+        bool didLoadPaths = false;
+
+        for (int i = 0; i < count; ++i) {
+            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+
+            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupByte = groupIndex / 8;
+            const uint8_t groupBit = 1 << (groupIndex % 8);
+
+            const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
+            if (!hasPath) {
+                // We track which paths are loaded in groups of kPathsPerGroup. To
+                // mark a path as loaded we need to load the entire group.
+                const int groupFirstPath = groupIndex * kPathsPerGroup;
+                const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
+
+                SkPath path;
+                for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
+                    fPathGenerator->generatePath(pathIdx, &path);
+                    this->onInitPath(pathIdx, path);
+                }
+
+                fGeneratedPaths[groupByte] |= groupBit;
+                didLoadPaths = true;
+            }
+        }
+
+        if (didLoadPaths) {
+            this->didChangeGpuMemorySize();
+        }
+    }
+
 #ifdef SK_DEBUG
+    void assertPathsLoaded(const void* indices, PathIndexType, int count) const;
+
+    template<typename IndexType> void assertPathsLoaded(const void* indices, int count) const {
+        if (!fPathGenerator) {
+            return;
+        }
+
+        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
+
+        for (int i = 0; i < count; ++i) {
+            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+
+            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupByte = groupIndex / 8;
+            const uint8_t groupBit = 1 << (groupIndex % 8);
+
+            SkASSERT(fGeneratedPaths[groupByte] & groupBit);
+        }
+    }
+
     virtual bool isEqualTo(const SkDescriptor& desc) const {
         return nullptr != fPathGenerator.get() && fPathGenerator->isEqualTo(desc);
     }
@@ -82,10 +142,9 @@ protected:
     virtual void onInitPath(int index, const SkPath&) const = 0;
 
 private:
-    // Notify when paths will be drawn in case this is a lazy-loaded path range.
-    friend class GrPathRendering;
-    void willDrawPaths(const void* indices, PathIndexType, int count) const;
-    template<typename IndexType> void willDrawPaths(const void* indices, int count) const;
+    enum {
+        kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
+    };
 
     mutable SkAutoTUnref<PathGenerator> fPathGenerator;
     mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths;
-- 
cgit v1.2.3