author     scroggo@google.com <scroggo@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>   2012-08-14 15:20:01 +0000
committer  scroggo@google.com <scroggo@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>   2012-08-14 15:20:01 +0000
commit     3e26bd0c357d849ff40b092decd7a5c46ec2ada4 (patch)
tree       d8ba75d3d1c99f4af9eb15b314023e1b6728fd11
parent     6d552ee5f56e76da6ba244df0f0df2234f2d8977 (diff)
Check in r5063 again, along with a fix for the tests.

Thanks to the Android build for catching the problem, which would have shown up elsewhere. We now access entry->fStorageSlot before deleting entry.

(Original message:)
Use the SkBitmapHeap to handle SkBitmaps in SkGPipe cross process.

Required moving the LRU handles from SkBitmapHeapEntry to LookupEntry.

Allows simplification of drawBitmap* calls in SkGPipeCanvas.

Review URL: https://codereview.appspot.com/6453113

git-svn-id: http://skia.googlecode.com/svn/trunk@5081 2bbb7eff-a529-9590-31e7-b0007b416f81
-rw-r--r--  src/core/SkBitmapHeap.cpp  | 147
-rw-r--r--  src/core/SkBitmapHeap.h    |  86
-rw-r--r--  src/pipe/SkGPipeRead.cpp   |   3
-rw-r--r--  src/pipe/SkGPipeWrite.cpp  | 172
4 files changed, 218 insertions(+), 190 deletions(-)
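To make the LRU bookkeeping in the patch easier to follow, here is a minimal, self-contained sketch of the idea the commit message describes: the doubly linked most/least-recently-used pointers now live on the lookup entries rather than on the heap entries, and touching an entry means unlinking it and re-appending it as most recent. The names (LookupEntry, fMoreRecentlyUsed, fLessRecentlyUsed) only echo the patch; this is a stand-alone toy, not the Skia implementation.

#include <cassert>
#include <cstdio>

// Hypothetical stand-in for the lookup-table entry; field names echo the patch.
struct LookupEntry {
    int          fStorageSlot;
    LookupEntry* fMoreRecentlyUsed = nullptr;
    LookupEntry* fLessRecentlyUsed = nullptr;
};

struct LruList {
    LookupEntry* fMostRecentlyUsed  = nullptr;
    LookupEntry* fLeastRecentlyUsed = nullptr;

    // Unlink entry from the doubly linked list, patching its neighbors.
    void remove(LookupEntry* entry) {
        if (fMostRecentlyUsed == entry) {
            fMostRecentlyUsed = entry->fLessRecentlyUsed;
            if (fMostRecentlyUsed == nullptr) {
                fLeastRecentlyUsed = nullptr;            // list is now empty
            } else {
                fMostRecentlyUsed->fMoreRecentlyUsed = nullptr;
            }
        } else {
            if (fLeastRecentlyUsed == entry) {
                fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
            }
            entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
            if (entry->fLessRecentlyUsed != nullptr) {
                entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
            }
        }
        entry->fMoreRecentlyUsed = nullptr;
    }

    // Append entry as the most recently used; it must not already be linked.
    void append(LookupEntry* entry) {
        if (fMostRecentlyUsed != nullptr) {
            fMostRecentlyUsed->fMoreRecentlyUsed = entry;
        } else {
            fLeastRecentlyUsed = entry;                  // list was empty
        }
        entry->fLessRecentlyUsed = fMostRecentlyUsed;
        fMostRecentlyUsed = entry;
    }
};

int main() {
    LookupEntry a{0}, b{1}, c{2};
    LruList lru;
    lru.append(&a); lru.append(&b); lru.append(&c);      // a is least recent
    lru.remove(&a); lru.append(&a);                      // touch a: now most recent
    assert(lru.fMostRecentlyUsed == &a && lru.fLeastRecentlyUsed == &b);
    printf("MRU slot %d, LRU slot %d\n",
           lru.fMostRecentlyUsed->fStorageSlot, lru.fLeastRecentlyUsed->fStorageSlot);
    return 0;
}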
diff --git a/src/core/SkBitmapHeap.cpp b/src/core/SkBitmapHeap.cpp
index 924264bf5c..7eb34b07a5 100644
--- a/src/core/SkBitmapHeap.cpp
+++ b/src/core/SkBitmapHeap.cpp
@@ -15,9 +15,7 @@
SkBitmapHeapEntry::SkBitmapHeapEntry()
: fSlot(-1)
, fRefCount(0)
- , fBytesAllocated(0)
- , fMoreRecentlyUsed(NULL)
- , fLessRecentlyUsed(NULL) {
+ , fBytesAllocated(0) {
}
SkBitmapHeapEntry::~SkBitmapHeapEntry() {
@@ -37,6 +35,30 @@ void SkBitmapHeapEntry::addReferences(int count) {
///////////////////////////////////////////////////////////////////////////////
+int SkBitmapHeap::LookupEntry::Compare(const SkBitmapHeap::LookupEntry *a,
+ const SkBitmapHeap::LookupEntry *b) {
+ if (a->fGenerationId < b->fGenerationId) {
+ return -1;
+ } else if (a->fGenerationId > b->fGenerationId) {
+ return 1;
+ } else if (a->fPixelOffset < b->fPixelOffset) {
+ return -1;
+ } else if (a->fPixelOffset > b->fPixelOffset) {
+ return 1;
+ } else if (a->fWidth < b->fWidth) {
+ return -1;
+ } else if (a->fWidth > b->fWidth) {
+ return 1;
+ } else if (a->fHeight < b->fHeight) {
+ return -1;
+ } else if (a->fHeight > b->fHeight) {
+ return 1;
+ }
+ return 0;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
SkBitmapHeap::SkBitmapHeap(int32_t preferredSize, int32_t ownerCount)
: INHERITED()
, fExternalStorage(NULL)
@@ -92,27 +114,35 @@ SkTRefArray<SkBitmap>* SkBitmapHeap::extractBitmaps() const {
return array;
}
-// We just "used" the entry. Update our LRU accordingly
-void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
- SkASSERT(entry != NULL);
- if (entry == fMostRecentlyUsed) {
- return;
- }
- // Remove info from its prior place, and make sure to cover the hole.
- if (fLeastRecentlyUsed == entry) {
+void SkBitmapHeap::removeFromLRU(SkBitmapHeap::LookupEntry* entry) {
+ if (fMostRecentlyUsed == entry) {
+ fMostRecentlyUsed = entry->fLessRecentlyUsed;
+ if (NULL == fMostRecentlyUsed) {
+ SkASSERT(fLeastRecentlyUsed == entry);
+ fLeastRecentlyUsed = NULL;
+ } else {
+ fMostRecentlyUsed->fMoreRecentlyUsed = NULL;
+ }
+ } else {
+ // Remove entry from its prior place, and make sure to cover the hole.
+ if (fLeastRecentlyUsed == entry) {
+ SkASSERT(entry->fMoreRecentlyUsed != NULL);
+ fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
+ }
+ // Since we have already considered the case where entry is the most recently used, it must
+ // have a more recently used at this point.
SkASSERT(entry->fMoreRecentlyUsed != NULL);
- fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
- }
- if (entry->fMoreRecentlyUsed != NULL) {
- SkASSERT(fMostRecentlyUsed != entry);
entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
- }
- if (entry->fLessRecentlyUsed != NULL) {
- SkASSERT(fLeastRecentlyUsed != entry);
- entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
+
+ if (entry->fLessRecentlyUsed != NULL) {
+ SkASSERT(fLeastRecentlyUsed != entry);
+ entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
+ }
}
entry->fMoreRecentlyUsed = NULL;
- // Set up the head and tail pointers properly.
+}
+
+void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
if (fMostRecentlyUsed != NULL) {
SkASSERT(NULL == fMostRecentlyUsed->fMoreRecentlyUsed);
fMostRecentlyUsed->fMoreRecentlyUsed = entry;
@@ -125,19 +155,20 @@ void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
}
// iterate through our LRU cache and try to find an entry to evict
-SkBitmapHeapEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
+SkBitmapHeap::LookupEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
SkASSERT(fPreferredCount != UNLIMITED_SIZE);
SkASSERT(fStorage.count() >= fPreferredCount);
- SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
+ SkBitmapHeap::LookupEntry* iter = fLeastRecentlyUsed;
while (iter != NULL) {
- if (iter->fRefCount > 0) {
+ SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
+ if (heapEntry->fRefCount > 0) {
// If the least recently used bitmap has not been unreferenced
// by its owner, then according to our LRU specifications a more
// recently used one can not have used all it's references yet either.
return NULL;
}
- if (replacement.pixelRef() && replacement.pixelRef() == iter->fBitmap.pixelRef()) {
+ if (replacement.getGenerationID() == iter->fGenerationId) {
// Do not replace a bitmap with a new one using the same
// pixel ref. Instead look for a different one that will
// potentially free up more space.
@@ -153,21 +184,22 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
if (UNLIMITED_SIZE == fPreferredCount) {
return 0;
}
- SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
+ LookupEntry* iter = fLeastRecentlyUsed;
size_t origBytesAllocated = fBytesAllocated;
// Purge starting from LRU until a non-evictable bitmap is found or until
// everything is evicted.
- while (iter && 0 == iter->fRefCount) {
- SkBitmapHeapEntry* next = iter->fMoreRecentlyUsed;
- this->removeEntryFromLookupTable(*iter);
+ while (iter != NULL) {
+ SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
+ if (heapEntry->fRefCount > 0) {
+ break;
+ }
+ LookupEntry* next = iter->fMoreRecentlyUsed;
+ this->removeEntryFromLookupTable(iter);
// Free the pixel memory. removeEntryFromLookupTable already reduced
// fBytesAllocated properly.
- iter->fBitmap.reset();
+ heapEntry->fBitmap.reset();
// Add to list of unused slots which can be reused in the future.
- fUnusedSlots.push(iter->fSlot);
- // Remove its LRU pointers, so that it does not pretend it is already in
- // the list the next time it is used.
- iter->fMoreRecentlyUsed = iter->fLessRecentlyUsed = NULL;
+ fUnusedSlots.push(heapEntry->fSlot);
iter = next;
if (origBytesAllocated - fBytesAllocated >= bytesToFree) {
break;
@@ -193,17 +225,17 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
}
int SkBitmapHeap::findInLookupTable(const LookupEntry& indexEntry, SkBitmapHeapEntry** entry) {
- int index = SkTSearch<const LookupEntry>(fLookupTable.begin(),
+ int index = SkTSearch<const LookupEntry>((const LookupEntry**)fLookupTable.begin(),
fLookupTable.count(),
- indexEntry, sizeof(indexEntry));
+ &indexEntry, sizeof(void*), LookupEntry::Compare);
if (index < 0) {
// insert ourselves into the bitmapIndex
index = ~index;
- fLookupTable.insert(index, 1, &indexEntry);
+ *fLookupTable.insert(index) = SkNEW_ARGS(LookupEntry, (indexEntry));
} else if (entry != NULL) {
// populate the entry if needed
- *entry = fStorage[fLookupTable[index].fStorageSlot];
+ *entry = fStorage[fLookupTable[index]->fStorageSlot];
}
return index;
@@ -229,19 +261,16 @@ bool SkBitmapHeap::copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBi
return true;
}
-int SkBitmapHeap::removeEntryFromLookupTable(const SkBitmapHeapEntry& entry) {
+int SkBitmapHeap::removeEntryFromLookupTable(LookupEntry* entry) {
// remove the bitmap index for the deleted entry
SkDEBUGCODE(int count = fLookupTable.count();)
- // FIXME: If copying bitmaps retained the generation ID, we could
- // just grab the generation ID from entry.fBitmap
- LookupEntry key(entry.fBitmap, entry.fGenerationID);
- int index = this->findInLookupTable(key, NULL);
+ int index = this->findInLookupTable(*entry, NULL);
// Verify that findInLookupTable found an existing entry rather than adding
// a new entry to the lookup table.
SkASSERT(count == fLookupTable.count());
-
+ fBytesAllocated -= fStorage[entry->fStorageSlot]->fBytesAllocated;
+ SkDELETE(fLookupTable[index]);
fLookupTable.remove(index);
- fBytesAllocated -= entry.fBytesAllocated;
return index;
}
@@ -249,13 +278,17 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
SkBitmapHeapEntry* entry = NULL;
int searchIndex = this->findInLookupTable(LookupEntry(originalBitmap), &entry);
- // check to see if we already had a copy of the bitmap in the heap
if (entry) {
+ // Already had a copy of the bitmap in the heap.
if (fOwnerCount != IGNORE_OWNERS) {
entry->addReferences(fOwnerCount);
}
if (fPreferredCount != UNLIMITED_SIZE) {
- this->setMostRecentlyUsed(entry);
+ LookupEntry* lookupEntry = fLookupTable[searchIndex];
+ if (lookupEntry != fMostRecentlyUsed) {
+ this->removeFromLRU(lookupEntry);
+ this->appendToLRU(lookupEntry);
+ }
}
return entry->fSlot;
}
@@ -263,10 +296,13 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
// decide if we need to evict an existing heap entry or create a new one
if (fPreferredCount != UNLIMITED_SIZE && fStorage.count() >= fPreferredCount) {
// iterate through our LRU cache and try to find an entry to evict
- entry = this->findEntryToReplace(originalBitmap);
- // we found an entry to evict
- if (entry) {
- int index = this->removeEntryFromLookupTable(*entry);
+ LookupEntry* lookupEntry = this->findEntryToReplace(originalBitmap);
+ if (lookupEntry != NULL) {
+ // we found an entry to evict
+ entry = fStorage[lookupEntry->fStorageSlot];
+ // Remove it from the LRU. The new entry will be added to the LRU later.
+ this->removeFromLRU(lookupEntry);
+ int index = this->removeEntryFromLookupTable(lookupEntry);
// update the current search index now that we have removed one
if (index < searchIndex) {
@@ -300,6 +336,7 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
// if the copy failed then we must abort
if (!copySucceeded) {
// delete the index
+ SkDELETE(fLookupTable[searchIndex]);
fLookupTable.remove(searchIndex);
// If entry is the last slot in storage, it is safe to delete it.
if (fStorage.count() - 1 == entry->fSlot) {
@@ -307,14 +344,16 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
fStorage.remove(entry->fSlot);
fBytesAllocated -= sizeof(SkBitmapHeapEntry);
SkDELETE(entry);
+ } else {
+ fUnusedSlots.push(entry->fSlot);
}
return INVALID_SLOT;
}
// update the index with the appropriate slot in the heap
- fLookupTable[searchIndex].fStorageSlot = entry->fSlot;
+ fLookupTable[searchIndex]->fStorageSlot = entry->fSlot;
- // compute the space taken by the this entry
+ // compute the space taken by this entry
// TODO if there is a shared pixel ref don't count it
// If the SkBitmap does not share an SkPixelRef with an SkBitmap already
// in the SharedHeap, also include the size of its pixels.
@@ -323,13 +362,11 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
// add the bytes from this entry to the total count
fBytesAllocated += entry->fBytesAllocated;
- entry->fGenerationID = originalBitmap.getGenerationID();
-
if (fOwnerCount != IGNORE_OWNERS) {
entry->addReferences(fOwnerCount);
}
if (fPreferredCount != UNLIMITED_SIZE) {
- this->setMostRecentlyUsed(entry);
+ this->appendToLRU(fLookupTable[searchIndex]);
}
return entry->fSlot;
}
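The new LookupEntry::Compare collapses the old operator< / operator!= pair into a single three-way comparator so the table of LookupEntry pointers can be kept sorted and binary-searched. The sketch below reproduces the same field ordering (generation id, pixel offset, width, height) but uses std::lower_bound in place of SkTSearch and a plain struct in place of LookupEntry; those substitutions are assumptions made to keep the example self-contained, not the Skia code.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <initializer_list>
#include <vector>

// Hypothetical key mirroring the fields the patch compares on.
struct Key {
    uint32_t fGenerationId;
    size_t   fPixelOffset;
    uint32_t fWidth;
    uint32_t fHeight;
};

// Three-way comparison in the same field order as LookupEntry::Compare.
static int Compare(const Key* a, const Key* b) {
    if (a->fGenerationId != b->fGenerationId) {
        return a->fGenerationId < b->fGenerationId ? -1 : 1;
    }
    if (a->fPixelOffset != b->fPixelOffset) {
        return a->fPixelOffset < b->fPixelOffset ? -1 : 1;
    }
    if (a->fWidth != b->fWidth) {
        return a->fWidth < b->fWidth ? -1 : 1;
    }
    if (a->fHeight != b->fHeight) {
        return a->fHeight < b->fHeight ? -1 : 1;
    }
    return 0;
}

int main() {
    // A table of pointers kept sorted by Compare, like the patch's
    // SkTDArray<LookupEntry*> searched with SkTSearch.
    std::vector<Key*> table;
    Key k1{7, 0, 64, 64}, k2{7, 0, 64, 32}, k3{3, 0, 16, 16};
    for (Key* k : {&k1, &k2, &k3}) {
        auto less = [](const Key* a, const Key* b) { return Compare(a, b) < 0; };
        auto it = std::lower_bound(table.begin(), table.end(), k, less);
        if (it == table.end() || Compare(*it, k) != 0) {
            table.insert(it, k);   // not found: insert at the sorted position
        }
    }
    for (const Key* k : table) {
        printf("gen %u  %ux%u\n", (unsigned)k->fGenerationId,
               (unsigned)k->fWidth, (unsigned)k->fHeight);
    }
    return 0;
}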
diff --git a/src/core/SkBitmapHeap.h b/src/core/SkBitmapHeap.h
index 3c00b5211b..71c340539f 100644
--- a/src/core/SkBitmapHeap.h
+++ b/src/core/SkBitmapHeap.h
@@ -39,16 +39,12 @@ private:
int32_t fSlot;
int32_t fRefCount;
- uint32_t fGenerationID;
SkBitmap fBitmap;
// Keep track of the bytes allocated for this bitmap. When replacing the
// bitmap or removing this HeapEntry we know how much memory has been
// reclaimed.
size_t fBytesAllocated;
- // TODO: Generalize the LRU caching mechanism
- SkBitmapHeapEntry* fMoreRecentlyUsed;
- SkBitmapHeapEntry* fLessRecentlyUsed;
friend class SkBitmapHeap;
};
@@ -176,7 +172,8 @@ public:
* Returns a count of the number of items currently in the heap
*/
int count() const {
- SkASSERT(fExternalStorage != NULL || fStorage.count() == fLookupTable.count());
+ SkASSERT(fExternalStorage != NULL ||
+ fStorage.count() - fUnusedSlots.count() == fLookupTable.count());
return fLookupTable.count();
}
@@ -197,43 +194,39 @@ public:
private:
struct LookupEntry {
- LookupEntry(const SkBitmap& bm, uint32_t genId = 0) {
- fGenerationId = 0 == genId ? bm.getGenerationID() : genId;
- fPixelOffset = bm.pixelRefOffset();
- fWidth = bm.width();
- fHeight = bm.height();
- }
- uint32_t fGenerationId; // SkPixelRef GenerationID.
- size_t fPixelOffset;
- uint32_t fWidth;
- uint32_t fHeight;
+ LookupEntry(const SkBitmap& bm)
+ : fGenerationId(bm.getGenerationID())
+ , fPixelOffset(bm.pixelRefOffset())
+ , fWidth(bm.width())
+ , fHeight(bm.height())
+ , fMoreRecentlyUsed(NULL)
+ , fLessRecentlyUsed(NULL){}
+
+ const uint32_t fGenerationId; // SkPixelRef GenerationID.
+ const size_t fPixelOffset;
+ const uint32_t fWidth;
+ const uint32_t fHeight;
+
+ // TODO: Generalize the LRU caching mechanism
+ LookupEntry* fMoreRecentlyUsed;
+ LookupEntry* fLessRecentlyUsed;
uint32_t fStorageSlot; // slot of corresponding bitmap in fStorage.
- bool operator < (const LookupEntry& other) const {
- if (this->fGenerationId != other.fGenerationId) {
- return this->fGenerationId < other.fGenerationId;
- } else if(this->fPixelOffset != other.fPixelOffset) {
- return this->fPixelOffset < other.fPixelOffset;
- } else if(this->fWidth != other.fWidth) {
- return this->fWidth < other.fWidth;
- } else {
- return this->fHeight < other.fHeight;
- }
- }
- bool operator != (const LookupEntry& other) const {
- return this->fGenerationId != other.fGenerationId
- || this->fPixelOffset != other.fPixelOffset
- || this->fWidth != other.fWidth
- || this->fHeight != other.fHeight;
- }
+ /**
+ * Compare two LookupEntry pointers, returning -1, 0, 1 for sorting.
+ */
+ static int Compare(const LookupEntry* a, const LookupEntry* b);
};
/**
- * Remove the entry from the lookup table.
+ * Remove the entry from the lookup table. Also deletes the entry pointed
+ * to by the table. Therefore, if a pointer to that one was passed in, the
+ * pointer should no longer be used, since the object to which it points has
+ * been deleted.
* @return The index in the lookup table of the entry before removal.
*/
- int removeEntryFromLookupTable(const SkBitmapHeapEntry&);
+ int removeEntryFromLookupTable(LookupEntry*);
/**
* Searches for the bitmap in the lookup table and returns the bitmaps index within the table.
@@ -245,12 +238,27 @@ private:
*/
int findInLookupTable(const LookupEntry& key, SkBitmapHeapEntry** entry);
- SkBitmapHeapEntry* findEntryToReplace(const SkBitmap& replacement);
+ LookupEntry* findEntryToReplace(const SkBitmap& replacement);
bool copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap);
- void setMostRecentlyUsed(SkBitmapHeapEntry* entry);
+
+ /**
+ * Remove a LookupEntry from the LRU, in preparation for either deleting or appending as most
+ * recent. Points the LookupEntry's old neighbors at each other, and sets fLeastRecentlyUsed
+ * (if there is still an entry left). Sets LookupEntry's fMoreRecentlyUsed to NULL and leaves
+ * its fLessRecentlyUsed unmodified.
+ */
+ void removeFromLRU(LookupEntry* entry);
+
+ /**
+ * Append a LookupEntry to the end of the LRU cache, marking it as the most
+ * recently used. Assumes that the LookupEntry is already in fLookupTable,
+ * but is not in the LRU cache. If it is in the cache, removeFromLRU should
+ * be called first.
+ */
+ void appendToLRU(LookupEntry*);
// searchable index that maps to entries in the heap
- SkTDArray<LookupEntry> fLookupTable;
+ SkTDArray<LookupEntry*> fLookupTable;
// heap storage
SkTDArray<SkBitmapHeapEntry*> fStorage;
@@ -259,8 +267,8 @@ private:
SkTDArray<int> fUnusedSlots;
ExternalStorage* fExternalStorage;
- SkBitmapHeapEntry* fMostRecentlyUsed;
- SkBitmapHeapEntry* fLeastRecentlyUsed;
+ LookupEntry* fMostRecentlyUsed;
+ LookupEntry* fLeastRecentlyUsed;
const int32_t fPreferredCount;
const int32_t fOwnerCount;
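The relaxed assertion in count() reflects that a purged bitmap now leaves its storage slot allocated but parked on fUnusedSlots for reuse, so live lookup entries equal allocated slots minus the free list. A toy model of that bookkeeping (illustrative names and plain std::vector, not the Skia types) makes the invariant concrete:

#include <cassert>
#include <cstdio>
#include <vector>

// Toy model of the heap's slot accounting; every live lookup entry owns exactly
// one storage slot, and evicted slots sit on a free list until reused.
struct ToyHeap {
    std::vector<int> storage;      // one element per allocated slot (fStorage)
    std::vector<int> unusedSlots;  // slots whose bitmap was purged (fUnusedSlots)
    std::vector<int> lookup;       // slot of each live entry (fLookupTable)

    int insert(int value) {
        int slot;
        if (!unusedSlots.empty()) {
            slot = unusedSlots.back();      // recycle a purged slot
            unusedSlots.pop_back();
            storage[slot] = value;
        } else {
            slot = (int)storage.size();     // grow the storage array
            storage.push_back(value);
        }
        lookup.push_back(slot);
        return slot;
    }

    void evict(size_t lookupIndex) {
        unusedSlots.push_back(lookup[lookupIndex]);  // slot stays allocated
        lookup.erase(lookup.begin() + lookupIndex);
    }

    int count() const {
        // The invariant the patched SkASSERT checks: live entries equal
        // allocated slots minus the ones sitting on the free list.
        assert(storage.size() - unusedSlots.size() == lookup.size());
        return (int)lookup.size();
    }
};

int main() {
    ToyHeap heap;
    heap.insert(10);
    heap.insert(20);
    heap.evict(0);       // purge the first entry; its slot becomes reusable
    heap.insert(30);     // reuses the freed slot instead of growing storage
    printf("count=%d storage=%zu\n", heap.count(), heap.storage.size());
    return 0;
}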
diff --git a/src/pipe/SkGPipeRead.cpp b/src/pipe/SkGPipeRead.cpp
index e485d500d1..9296e5cd6b 100644
--- a/src/pipe/SkGPipeRead.cpp
+++ b/src/pipe/SkGPipeRead.cpp
@@ -113,7 +113,6 @@ public:
}
void addBitmap(int index) {
- index--;
SkBitmap* bm;
if(fBitmaps.count() == index) {
bm = SkNEW(SkBitmap);
@@ -125,7 +124,7 @@ public:
}
SkBitmap* getBitmap(unsigned index) {
- return fBitmaps[index - 1];
+ return fBitmaps[index];
}
void setSharedHeap(SkBitmapHeap* heap) {
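On the read side the patch drops the off-by-one adjustment, so heap slots index the reader's bitmap table directly. A small sketch of that zero-based table, with a placeholder Bitmap type standing in for SkBitmap:

#include <cassert>
#include <vector>

// Placeholder for SkBitmap; only the indexing behavior matters here.
struct Bitmap { int id = 0; };

class ReaderBitmaps {
public:
    Bitmap* addBitmap(int index) {
        if ((int)fBitmaps.size() == index) {
            fBitmaps.push_back(new Bitmap);   // first definition of this slot
        }
        return fBitmaps[index];               // redefinitions reuse the slot
    }
    Bitmap* getBitmap(unsigned index) { return fBitmaps[index]; }
    ~ReaderBitmaps() { for (Bitmap* b : fBitmaps) delete b; }
private:
    std::vector<Bitmap*> fBitmaps;
};

int main() {
    ReaderBitmaps table;
    table.addBitmap(0)->id = 42;
    assert(table.getBitmap(0)->id == 42);     // slot 0 maps straight to element 0
    return 0;
}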
diff --git a/src/pipe/SkGPipeWrite.cpp b/src/pipe/SkGPipeWrite.cpp
index 128a458c9e..fbb67bedec 100644
--- a/src/pipe/SkGPipeWrite.cpp
+++ b/src/pipe/SkGPipeWrite.cpp
@@ -163,6 +163,15 @@ public:
if (this->needOpBytes()) {
this->writeOp(kDone_DrawOp);
this->doNotify();
+ if (shouldFlattenBitmaps(fFlags)) {
+ // In this case, a BitmapShuttle is reffed by the SharedHeap
+ // and refs this canvas. Unref the SharedHeap to end the
+ // circular reference. When shouldFlattenBitmaps is false,
+ // there is no circular reference, so the SharedHeap can be
+ // safely unreffed in the destructor.
+ fSharedHeap->unref();
+ fSharedHeap = NULL;
+ }
}
fDone = true;
}
@@ -172,11 +181,6 @@ public:
size_t freeMemoryIfPossible(size_t bytesToFree);
size_t storageAllocatedForRecording() {
- // FIXME: This can be removed once fSharedHeap is used by cross process
- // case.
- if (NULL == fSharedHeap) {
- return 0;
- }
return fSharedHeap->bytesAllocated();
}
@@ -231,6 +235,11 @@ public:
const SkPaint&) SK_OVERRIDE;
virtual void drawData(const void*, size_t) SK_OVERRIDE;
+ /**
+ * Flatten an SkBitmap to send to the reader, where it will be referenced
+ * according to slot.
+ */
+ bool shuttleBitmap(const SkBitmap&, int32_t slot);
private:
enum {
kNoSaveLayer = -1,
@@ -243,7 +252,7 @@ private:
size_t fBlockSize; // amount allocated for writer
size_t fBytesNotified;
bool fDone;
- uint32_t fFlags;
+ const uint32_t fFlags;
SkRefCntSet fTypefaceSet;
@@ -273,27 +282,15 @@ private:
// if a new SkFlatData was added when in cross process mode
void flattenFactoryNames();
- // These are only used when in cross process, but with no shared address
- // space, so bitmaps are flattened.
- FlattenableHeap fBitmapHeap;
- SkBitmapDictionary fBitmapDictionary;
- int flattenToIndex(const SkBitmap&);
-
FlattenableHeap fFlattenableHeap;
FlatDictionary fFlatDictionary;
int fCurrFlatIndex[kCount_PaintFlats];
int flattenToIndex(SkFlattenable* obj, PaintFlats);
- // Common code used by drawBitmap* when flattening.
- bool commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op, unsigned flags,
- size_t opBytesNeeded, const SkPaint* paint);
- // Common code used by drawBitmap* when storing in the heap.
- bool commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op, unsigned flags,
- size_t opBytesNeeded, const SkPaint* paint);
- // Convenience type for function pointer
- typedef bool (SkGPipeCanvas::*BitmapCommonFunction)(const SkBitmap&,
- DrawOps, unsigned,
- size_t, const SkPaint*);
+ // Common code used by drawBitmap*. Behaves differently depending on the
+ // type of SkBitmapHeap being used, which is determined by the flags used.
+ bool commonDrawBitmap(const SkBitmap& bm, DrawOps op, unsigned flags,
+ size_t opBytesNeeded, const SkPaint* paint);
SkPaint fPaint;
void writePaint(const SkPaint&);
@@ -321,23 +318,20 @@ void SkGPipeCanvas::flattenFactoryNames() {
}
}
-int SkGPipeCanvas::flattenToIndex(const SkBitmap & bitmap) {
+bool SkGPipeCanvas::shuttleBitmap(const SkBitmap& bm, int32_t slot) {
SkASSERT(shouldFlattenBitmaps(fFlags));
- uint32_t flags = SkFlattenableWriteBuffer::kCrossProcess_Flag;
- bool added, replaced;
- const SkFlatData* flat = fBitmapDictionary.findAndReplace(
- bitmap, flags, fBitmapHeap.flatToReplace(), &added, &replaced);
-
- int index = flat->index();
- if (added) {
- this->flattenFactoryNames();
- size_t flatSize = flat->flatSize();
- if (this->needOpBytes(flatSize)) {
- this->writeOp(kDef_Bitmap_DrawOp, 0, index);
- fWriter.write(flat->data(), flatSize);
- }
+ SkOrderedWriteBuffer buffer(1024);
+ buffer.setNamedFactoryRecorder(fFactorySet);
+ bm.flatten(buffer);
+ this->flattenFactoryNames();
+ uint32_t size = buffer.size();
+ if (this->needOpBytes(size)) {
+ this->writeOp(kDef_Bitmap_DrawOp, 0, slot);
+ void* dst = static_cast<void*>(fWriter.reserve(size));
+ buffer.writeToMemory(dst);
+ return true;
}
- return index;
+ return false;
}
// return 0 for NULL (or unflattenable obj), or index-base-1
@@ -377,6 +371,24 @@ int SkGPipeCanvas::flattenToIndex(SkFlattenable* obj, PaintFlats paintflat) {
///////////////////////////////////////////////////////////////////////////////
+/**
+ * If SkBitmaps are to be flattened to send to the reader, this class is
+ * provided to the SkBitmapHeap to tell the SkGPipeCanvas to do so.
+ */
+class BitmapShuttle : public SkBitmapHeap::ExternalStorage {
+public:
+ BitmapShuttle(SkGPipeCanvas*);
+
+ ~BitmapShuttle();
+
+ virtual bool insert(const SkBitmap& bitmap, int32_t slot) SK_OVERRIDE;
+
+private:
+ SkGPipeCanvas* fCanvas;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
#define MIN_BLOCK_SIZE (16 * 1024)
#define BITMAPS_TO_KEEP 5
#define FLATTENABLES_TO_KEEP 10
@@ -386,8 +398,6 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
: fFactorySet(isCrossProcess(flags) ? SkNEW(SkNamedFactorySet) : NULL)
, fWriter(*writer)
, fFlags(flags)
-, fBitmapHeap(BITMAPS_TO_KEEP, fFactorySet)
-, fBitmapDictionary(&fBitmapHeap)
, fFlattenableHeap(FLATTENABLES_TO_KEEP, fFactorySet)
, fFlatDictionary(&fFlattenableHeap) {
fController = controller;
@@ -411,10 +421,12 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
}
if (shouldFlattenBitmaps(flags)) {
- // TODO: Use the shared heap for cross process case as well.
- fSharedHeap = NULL;
+ BitmapShuttle* shuttle = SkNEW_ARGS(BitmapShuttle, (this));
+ fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (shuttle, BITMAPS_TO_KEEP));
+ shuttle->unref();
} else {
- fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (5, controller->numberOfReaders()));
+ fSharedHeap = SkNEW_ARGS(SkBitmapHeap,
+ (BITMAPS_TO_KEEP, controller->numberOfReaders()));
if (this->needOpBytes(sizeof(void*))) {
this->writeOp(kShareHeap_DrawOp);
fWriter.writePtr(static_cast<void*>(fSharedHeap));
@@ -426,8 +438,6 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
SkGPipeCanvas::~SkGPipeCanvas() {
this->finish();
SkSafeUnref(fFactorySet);
- // FIXME: This can be changed to unref() once fSharedHeap is used by cross
- // process case.
SkSafeUnref(fSharedHeap);
}
@@ -682,26 +692,10 @@ void SkGPipeCanvas::drawPath(const SkPath& path, const SkPaint& paint) {
}
}
-bool SkGPipeCanvas::commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op,
- unsigned flags,
- size_t opBytesNeeded,
- const SkPaint* paint) {
- if (paint != NULL) {
- flags |= kDrawBitmap_HasPaint_DrawOpsFlag;
- this->writePaint(*paint);
- }
- int bitmapIndex = this->flattenToIndex(bm);
- if (this->needOpBytes(opBytesNeeded)) {
- this->writeOp(op, flags, bitmapIndex);
- return true;
- }
- return false;
-}
-
-bool SkGPipeCanvas::commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op,
- unsigned flags,
- size_t opBytesNeeded,
- const SkPaint* paint) {
+bool SkGPipeCanvas::commonDrawBitmap(const SkBitmap& bm, DrawOps op,
+ unsigned flags,
+ size_t opBytesNeeded,
+ const SkPaint* paint) {
int32_t bitmapIndex = fSharedHeap->insert(bm);
if (SkBitmapHeap::INVALID_SLOT == bitmapIndex) {
return false;
@@ -722,11 +716,7 @@ void SkGPipeCanvas::drawBitmap(const SkBitmap& bm, SkScalar left, SkScalar top,
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(SkScalar) * 2;
- BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
- &SkGPipeCanvas::commonDrawBitmapFlatten :
- &SkGPipeCanvas::commonDrawBitmapHeap;
-
- if ((*this.*bitmapCommon)(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
+ if (this->commonDrawBitmap(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.writeScalar(left);
fWriter.writeScalar(top);
}
@@ -744,12 +734,8 @@ void SkGPipeCanvas::drawBitmapRect(const SkBitmap& bm, const SkIRect* src,
} else {
flags = 0;
}
-
- BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
- &SkGPipeCanvas::commonDrawBitmapFlatten :
- &SkGPipeCanvas::commonDrawBitmapHeap;
- if ((*this.*bitmapCommon)(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
+ if (this->commonDrawBitmap(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
if (hasSrc) {
fWriter.write32(src->fLeft);
fWriter.write32(src->fTop);
@@ -765,11 +751,7 @@ void SkGPipeCanvas::drawBitmapMatrix(const SkBitmap& bm, const SkMatrix& matrix,
NOTIFY_SETUP(this);
size_t opBytesNeeded = matrix.writeToMemory(NULL);
- BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
- &SkGPipeCanvas::commonDrawBitmapFlatten :
- &SkGPipeCanvas::commonDrawBitmapHeap;
-
- if ((*this.*bitmapCommon)(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
+ if (this->commonDrawBitmap(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.writeMatrix(matrix);
}
}
@@ -779,11 +761,7 @@ void SkGPipeCanvas::drawBitmapNine(const SkBitmap& bm, const SkIRect& center,
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(int32_t) * 4 + sizeof(SkRect);
- BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
- &SkGPipeCanvas::commonDrawBitmapFlatten :
- &SkGPipeCanvas::commonDrawBitmapHeap;
-
- if ((*this.*bitmapCommon)(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
+ if (this->commonDrawBitmap(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.write32(center.fLeft);
fWriter.write32(center.fTop);
fWriter.write32(center.fRight);
@@ -797,11 +775,7 @@ void SkGPipeCanvas::drawSprite(const SkBitmap& bm, int left, int top,
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(int32_t) * 2;
- BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
- &SkGPipeCanvas::commonDrawBitmapFlatten :
- &SkGPipeCanvas::commonDrawBitmapHeap;
-
- if ((*this.*bitmapCommon)(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
+ if (this->commonDrawBitmap(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.write32(left);
fWriter.write32(top);
}
@@ -960,11 +934,6 @@ void SkGPipeCanvas::flushRecording(bool detachCurrentBlock) {
}
size_t SkGPipeCanvas::freeMemoryIfPossible(size_t bytesToFree) {
- // FIXME: This can be removed once fSharedHeap is used by cross process
- // case.
- if (NULL == fSharedHeap) {
- return 0;
- }
return fSharedHeap->freeMemoryIfPossible(bytesToFree);
}
@@ -1145,3 +1114,18 @@ size_t SkGPipeWriter::storageAllocatedForRecording() const {
return NULL == fCanvas ? 0 : fCanvas->storageAllocatedForRecording();
}
+///////////////////////////////////////////////////////////////////////////////
+
+BitmapShuttle::BitmapShuttle(SkGPipeCanvas* canvas) {
+ SkASSERT(canvas != NULL);
+ fCanvas = canvas;
+ fCanvas->ref();
+}
+
+BitmapShuttle::~BitmapShuttle() {
+ fCanvas->unref();
+}
+
+bool BitmapShuttle::insert(const SkBitmap& bitmap, int32_t slot) {
+ return fCanvas->shuttleBitmap(bitmap, slot);
+}