path: root/src/gpu/ccpr
author	Chris Dalton <csmartdalton@google.com>	2018-06-22 11:43:31 -0600
committer	Skia Commit-Bot <skia-commit-bot@chromium.org>	2018-06-22 20:44:17 +0000
commita8429cf8fd4148f8c433afa83e5f880ff9635e40 (patch)
treec0cc1c2981a9f934a6c5171e772020f3b95a0a8b /src/gpu/ccpr
parentc5c3df66430aafcc9bbe9335292a274066d2a611 (diff)
ccpr: Cache paths with >=50% visibility
Adds a hit count to cache entries. Paths now don't get stashed until their
second hit (and cached on their third). Mostly-visible, cachable paths render
their entire mask on the second hit, in hopes that we will be able to cache
them alongside the fully visible ones.

Bug: skia:
Change-Id: Idd18f5dc3090f13531f630d229f4808198695fea
Reviewed-on: https://skia-review.googlesource.com/136541
Commit-Queue: Chris Dalton <csmartdalton@google.com>
Reviewed-by: Robert Phillips <robertphillips@google.com>
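To summarize the new policy outside the diff: a draw's mask visibility is classified from its clipped vs. unclipped device bounds, and the stash/cache decisions then key off a per-entry hit count. The following is a minimal, self-contained sketch of that logic; it is not the Skia code, and placeholder names such as CacheEntryStub, classifyVisibility, shouldPromoteToComplete, and shouldStashMask are illustrative only.

    // Sketch only: simplified restatement of the policy in the diff below.
    #include <cstdint>

    enum class Visibility { kPartial, kMostlyComplete, kComplete };

    // Classify how much of the path's device-space bounds survives clipping.
    // Mirrors the area threshold added in GrCCDrawPathsOp::Make().
    Visibility classifyVisibility(int64_t unclippedArea, int64_t clippedArea,
                                  bool fullyInsideClip) {
        if (fullyInsideClip) {
            return Visibility::kComplete;
        }
        // "Mostly complete": at least half the path is visible, or the path is tiny.
        return (clippedArea >= unclippedArea / 2 || unclippedArea < 100 * 100)
                ? Visibility::kMostlyComplete
                : Visibility::kPartial;
    }

    struct CacheEntryStub {
        int hitCount = 1;  // 1 when created; bumped on each lookup with a compatible matrix.
    };

    // Second hit on a mostly-visible path: render the whole mask (if it fits in a
    // render target) so the fully rendered mask can be stashed and later cached.
    bool shouldPromoteToComplete(Visibility v, const CacheEntryStub& entry,
                                 int maxShapeDimension, int maxRenderTargetSize) {
        return Visibility::kMostlyComplete == v && entry.hitCount > 1 &&
               maxShapeDimension <= maxRenderTargetSize;
    }

    // Only completely visible masks seen more than once get stashed after the flush;
    // the next hit can then copy the stashed mask into the permanent cache.
    bool shouldStashMask(Visibility v, const CacheEntryStub& entry) {
        return Visibility::kComplete == v && entry.hitCount > 1;
    }

The 100x100 carve-out keeps small paths eligible even when the clip hides most of them, presumably because rendering the full mask for a tiny path is cheap relative to the benefit of caching it.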
Diffstat (limited to 'src/gpu/ccpr')
-rw-r--r--  src/gpu/ccpr/GrCCDrawPathsOp.cpp | 105
-rw-r--r--  src/gpu/ccpr/GrCCDrawPathsOp.h   |  22
-rw-r--r--  src/gpu/ccpr/GrCCPathCache.cpp   |   4
-rw-r--r--  src/gpu/ccpr/GrCCPathCache.h     |  13
4 files changed, 100 insertions, 44 deletions
diff --git a/src/gpu/ccpr/GrCCDrawPathsOp.cpp b/src/gpu/ccpr/GrCCDrawPathsOp.cpp
index 4eddd84034..612cc15e02 100644
--- a/src/gpu/ccpr/GrCCDrawPathsOp.cpp
+++ b/src/gpu/ccpr/GrCCDrawPathsOp.cpp
@@ -22,35 +22,50 @@ static bool has_coord_transforms(const GrPaint& paint) {
return false;
}
+static int64_t area(const SkIRect& r) {
+ return sk_64_mul(r.height(), r.width());
+}
+
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
const SkIRect& clipIBounds,
const SkMatrix& m,
const GrShape& shape,
const SkRect& devBounds,
GrPaint&& paint) {
- bool canStashPathMask = true;
- SkIRect looseClippedIBounds;
- devBounds.roundOut(&looseClippedIBounds); // GrCCPathParser might find slightly tighter bounds.
- if (!clipIBounds.contains(looseClippedIBounds)) {
- canStashPathMask = false;
- if (!looseClippedIBounds.intersect(clipIBounds)) {
+ SkIRect shapeDevIBounds;
+ devBounds.roundOut(&shapeDevIBounds); // GrCCPathParser might find slightly tighter bounds.
+
+ SkIRect maskDevIBounds;
+ Visibility maskVisibility;
+ if (clipIBounds.contains(shapeDevIBounds)) {
+ maskDevIBounds = shapeDevIBounds;
+ maskVisibility = Visibility::kComplete;
+ } else {
+ if (!maskDevIBounds.intersect(clipIBounds, shapeDevIBounds)) {
return nullptr;
}
+ int64_t unclippedArea = area(shapeDevIBounds);
+ int64_t clippedArea = area(maskDevIBounds);
+ maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
+ ? Visibility::kMostlyComplete // i.e., visible enough to justify rendering the
+ // whole thing if we think we can cache it.
+ : Visibility::kPartial;
}
GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();
- return pool->allocate<GrCCDrawPathsOp>(looseClippedIBounds, m, shape, canStashPathMask,
- devBounds, std::move(paint));
+ return pool->allocate<GrCCDrawPathsOp>(m, shape, shapeDevIBounds, maskDevIBounds,
+ maskVisibility, devBounds, std::move(paint));
}
-GrCCDrawPathsOp::GrCCDrawPathsOp(const SkIRect& looseClippedIBounds, const SkMatrix& m,
- const GrShape& shape, bool canStashPathMask,
- const SkRect& devBounds, GrPaint&& paint)
+GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape,
+ const SkIRect& shapeDevIBounds, const SkIRect& maskDevIBounds,
+ Visibility maskVisibility, const SkRect& devBounds,
+ GrPaint&& paint)
: GrDrawOp(ClassID())
, fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
, fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(paint))
- , fDraws(looseClippedIBounds, m, shape, paint.getColor(), canStashPathMask)
+ , fDraws(m, shape, shapeDevIBounds, maskDevIBounds, maskVisibility, paint.getColor())
, fProcessors(std::move(paint)) { // Paint must be moved after fetching its color above.
SkDEBUGCODE(fBaseInstance = -1);
// FIXME: intersect with clip bounds to (hopefully) improve batching.
@@ -65,13 +80,16 @@ GrCCDrawPathsOp::~GrCCDrawPathsOp() {
}
}
-GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkIRect& clippedDevIBounds, const SkMatrix& m,
- const GrShape& shape, GrColor color, bool canStashPathMask)
- : fLooseClippedIBounds(clippedDevIBounds)
- , fMatrix(m)
+GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
+ const SkIRect& shapeDevIBounds,
+ const SkIRect& maskDevIBounds, Visibility maskVisibility,
+ GrColor color)
+ : fMatrix(m)
, fShape(shape)
- , fColor(color)
- , fCanStashPathMask(canStashPathMask) {
+ , fShapeDevIBounds(shapeDevIBounds)
+ , fMaskDevIBounds(maskDevIBounds)
+ , fMaskVisibility(maskVisibility)
+ , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
if (fShape.hasUnstyledKey()) {
// On AOSP we round view matrix translates to integer values for cachable paths. We do this
@@ -140,9 +158,12 @@ void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
draw.fShape.asPath(&path);
MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
- draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(draw.fCanStashPathMask));
+ bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
+ draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
+
if (auto cacheEntry = draw.fCacheEntry.get()) {
SkASSERT(!cacheEntry->currFlushAtlas()); // Shouldn't be set until setupResources().
+
if (cacheEntry->atlasKey().isValid()) {
// Does the path already exist in a cached atlas?
if (cacheEntry->hasCachedAtlas() &&
@@ -168,18 +189,20 @@ void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
cacheEntry->resetAtlasKeyAndInfo();
}
- if (!draw.fCanStashPathMask) {
- // No point in keeping this cache entry around anymore if we aren't going to try and
- // stash the the rendered path mask after flush.
- draw.fCacheEntry = nullptr;
- pathCache->evict(cacheEntry);
+ if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1 &&
+ SkTMax(draw.fShapeDevIBounds.height(),
+ draw.fShapeDevIBounds.width()) <= onFlushRP->caps()->maxRenderTargetSize()) {
+ // We've seen this path before with a compatible matrix, and it's mostly visible.
+ // Just render the whole mask so we can try to cache it.
+ draw.fMaskDevIBounds = draw.fShapeDevIBounds;
+ draw.fMaskVisibility = Visibility::kComplete;
}
}
++specs->fNumRenderedPaths;
specs->fRenderedPathStats.statPath(path);
- specs->fRenderedAtlasSpecs.accountForSpace(draw.fLooseClippedIBounds.width(),
- draw.fLooseClippedIBounds.height());
+ specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
+ draw.fMaskDevIBounds.height());
}
}
@@ -244,21 +267,37 @@ void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
SkRect devBounds, devBounds45;
SkIRect devIBounds;
SkIVector devToAtlasOffset;
- if (auto atlas = resources->renderPathInAtlas(draw.fLooseClippedIBounds, draw.fMatrix, path,
+ if (auto atlas = resources->renderPathInAtlas(draw.fMaskDevIBounds, draw.fMatrix, path,
&devBounds, &devBounds45, &devIBounds,
&devToAtlasOffset)) {
this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
draw.fColor, doEvenOddFill);
- if (draw.fCacheEntry && draw.fCanStashPathMask &&
- resources->nextAtlasToStash() == atlas) {
+
+ // If we have a spot in the path cache, try to make a note of where this mask is so we
+ // can reuse it in the future.
+ if (auto cacheEntry = draw.fCacheEntry.get()) {
+ SkASSERT(!cacheEntry->hasCachedAtlas());
+
+ if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
+ // Don't cache a path mask unless it's completely visible with a hit count > 1.
+ //
+ // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
+ // fully visible during accountForOwnPaths().
+ continue;
+ }
+
+ if (resources->nextAtlasToStash() != atlas) {
+ // This mask does not belong to the atlas that will be stashed for next flush.
+ continue;
+ }
+
const GrUniqueKey& atlasKey =
resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
- draw.fCacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds,
- devBounds45, devIBounds,
- draw.fCachedMaskShift);
+ cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
+ devIBounds, draw.fCachedMaskShift);
// Remember this atlas in case we encounter the path again during the same flush.
- draw.fCacheEntry->setCurrFlushAtlas(atlas);
+ cacheEntry->setCurrFlushAtlas(atlas);
}
continue;
}
diff --git a/src/gpu/ccpr/GrCCDrawPathsOp.h b/src/gpu/ccpr/GrCCDrawPathsOp.h
index e26b0f78c1..8bfccc4745 100644
--- a/src/gpu/ccpr/GrCCDrawPathsOp.h
+++ b/src/gpu/ccpr/GrCCDrawPathsOp.h
@@ -71,8 +71,15 @@ public:
private:
friend class GrOpMemoryPool;
- GrCCDrawPathsOp(const SkIRect& clippedDevIBounds, const SkMatrix&, const GrShape&,
- bool canStashPathMask, const SkRect& devBounds, GrPaint&&);
+ enum class Visibility {
+ kPartial,
+ kMostlyComplete, // (i.e., can we cache the whole path mask if we think it will be reused?)
+ kComplete
+ };
+
+ GrCCDrawPathsOp(const SkMatrix&, const GrShape&, const SkIRect& shapeDevIBounds,
+ const SkIRect& maskDevIBounds, Visibility maskVisibility,
+ const SkRect& devBounds, GrPaint&&);
void recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx);
@@ -80,18 +87,17 @@ private:
const uint32_t fSRGBFlags;
struct SingleDraw {
- SingleDraw(const SkIRect& clippedDevIBounds, const SkMatrix&, const GrShape&, GrColor,
- bool canStashPathMask);
+ SingleDraw(const SkMatrix&, const GrShape&, const SkIRect& shapeDevIBounds,
+ const SkIRect& maskDevIBounds, Visibility maskVisibility, GrColor);
~SingleDraw();
- const SkIRect fLooseClippedIBounds;
SkMatrix fMatrix;
const GrShape fShape;
+ const SkIRect fShapeDevIBounds;
+ SkIRect fMaskDevIBounds;
+ Visibility fMaskVisibility;
GrColor fColor;
- // If we render the path, can we stash its atlas and copy to the resource cache next flush?
- const bool fCanStashPathMask;
-
sk_sp<GrCCPathCacheEntry> fCacheEntry;
sk_sp<GrTextureProxy> fCachedAtlasProxy;
SkIVector fCachedMaskShift;
diff --git a/src/gpu/ccpr/GrCCPathCache.cpp b/src/gpu/ccpr/GrCCPathCache.cpp
index 36e824e81f..b6eb82a05a 100644
--- a/src/gpu/ccpr/GrCCPathCache.cpp
+++ b/src/gpu/ccpr/GrCCPathCache.cpp
@@ -114,7 +114,9 @@ sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTr
if (HashNode* node = fHashTable.find({keyData.get()})) {
entry = node->entry();
SkASSERT(this == entry->fCacheWeakPtr);
- if (!fuzzy_equals(m, entry->fMaskTransform)) {
+ if (fuzzy_equals(m, entry->fMaskTransform)) {
+ ++entry->fHitCount;
+ } else {
this->evict(entry); // The path was reused with an incompatible matrix.
entry = nullptr;
}
diff --git a/src/gpu/ccpr/GrCCPathCache.h b/src/gpu/ccpr/GrCCPathCache.h
index 0e6f4f055d..6315f7cbd3 100644
--- a/src/gpu/ccpr/GrCCPathCache.h
+++ b/src/gpu/ccpr/GrCCPathCache.h
@@ -93,13 +93,21 @@ private:
};
/**
- * This class stores all the data necessary to draw a specific path from its corresponding cached
- * atlas.
+ * This class stores all the data necessary to draw a specific path + matrix combination from their
+ * corresponding cached atlas.
*/
class GrCCPathCacheEntry : public SkPathRef::GenIDChangeListener {
public:
SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);
+ // The number of times this specific entry (path + matrix combination) has been pulled from
+ // the path cache. As long as the caller does exactly one lookup per draw, this translates to
+ // the number of times the path has been drawn with a compatible matrix.
+ //
+ // If the entry did not previously exist and was created during
+ // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
+ int hitCount() const { return fHitCount; }
+
// Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
// (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }
@@ -150,6 +158,7 @@ private:
GrCCPathCache* fCacheWeakPtr; // Gets manually reset to null by the path cache upon eviction.
const MaskTransform fMaskTransform;
+ int fHitCount = 1;
GrUniqueKey fAtlasKey;
SkIVector fAtlasOffset;