Diffstat (limited to 'src')
-rw-r--r--  src/gpu/GrContext.cpp     |  16
-rw-r--r--  src/gpu/GrGpuFactory.cpp  |  29
-rw-r--r--  src/gpu/mock/GrMockGpu.h  | 154
3 files changed, 171 insertions(+), 28 deletions(-)
diff --git a/src/gpu/GrContext.cpp b/src/gpu/GrContext.cpp
index b0d40f6668..405668d4c4 100644
--- a/src/gpu/GrContext.cpp
+++ b/src/gpu/GrContext.cpp
@@ -53,14 +53,12 @@ GrContext* GrContext::Create(GrBackend backend, GrBackendContext backendContext)
GrContext* GrContext::Create(GrBackend backend, GrBackendContext backendContext,
const GrContextOptions& options) {
- GrContext* context = new GrContext;
+ sk_sp<GrContext> context(new GrContext);
- if (context->init(backend, backendContext, options)) {
- return context;
- } else {
- context->unref();
+ if (!context->init(backend, backendContext, options)) {
return nullptr;
}
+ return context.release();
}
static int32_t gNextID = 1;
@@ -91,12 +89,6 @@ bool GrContext::init(GrBackend backend, GrBackendContext backendContext,
if (!fGpu) {
return false;
}
- this->initCommon(options);
- return true;
-}
-
-void GrContext::initCommon(const GrContextOptions& options) {
- ASSERT_SINGLE_OWNER
fCaps = SkRef(fGpu->caps());
fResourceCache = new GrResourceCache(fCaps, fUniqueID);
@@ -113,6 +105,8 @@ void GrContext::initCommon(const GrContextOptions& options) {
fAtlasGlyphCache = new GrAtlasGlyphCache(this, options.fGlyphCacheTextureMaximumBytes);
fTextBlobCache.reset(new GrTextBlobCache(TextBlobCacheOverBudgetCB, this));
+
+ return true;
}
GrContext::~GrContext() {
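
For illustration only (not part of this change): Create() still returns a bare GrContext* carrying one reference, so a caller typically adopts it into an sk_sp; the sk_sp inside Create() only guards the failure path. A minimal caller-side sketch, with argument names assumed:

    sk_sp<GrContext> ctx(GrContext::Create(backend, backendContext, options));  // adopt the ref
    if (!ctx) {
        // init() failed; the guard inside Create() has already unreffed the context.
    }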
diff --git a/src/gpu/GrGpuFactory.cpp b/src/gpu/GrGpuFactory.cpp
index c6134b65a2..1fc19c144a 100644
--- a/src/gpu/GrGpuFactory.cpp
+++ b/src/gpu/GrGpuFactory.cpp
@@ -7,31 +7,26 @@
#include "GrGpuFactory.h"
-
-#include "GrGpu.h"
-#include "gl/GrGLConfig.h"
#include "gl/GrGLGpu.h"
+#include "mock/GrMockGpu.h"
#ifdef SK_VULKAN
#include "vk/GrVkGpu.h"
#endif
-static CreateGpuProc gGpuFactories[kBackendCount] = { GrGLGpu::Create, nullptr };
-
-#ifdef SK_VULKAN
-GrGpuFactoryRegistrar gVkGpuFactoryProc(kVulkan_GrBackend, GrVkGpu::Create);
-#endif
-
-GrGpuFactoryRegistrar::GrGpuFactoryRegistrar(int i, CreateGpuProc proc) {
- gGpuFactories[i] = proc;
-}
-
GrGpu* GrGpu::Create(GrBackend backend,
GrBackendContext backendContext,
const GrContextOptions& options,
GrContext* context) {
- SkASSERT((int)backend < kBackendCount);
- if (!gGpuFactories[backend]) {
- return nullptr;
+ switch (backend) {
+ case kOpenGL_GrBackend:
+ return GrGLGpu::Create(backendContext, options, context);
+#ifdef SK_VULKAN
+ case kVulkan_GrBackend:
+ return GrVkGpu::Create(backendContext, options, context);
+#endif
+ case kMock_GrBackend:
+ return GrMockGpu::Create(backendContext, options, context);
+ default:
+ return nullptr;
}
- return (gGpuFactories[backend])(backendContext, options, context);
}
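
For illustration only (not part of this change): a sketch of dispatching through the new switch-based factory, with local names assumed; the mock case needs no real backend context, so 0 is passed.

    GrGpu* gpu = GrGpu::Create(kMock_GrBackend, (GrBackendContext)0, options, context);
    if (!gpu) {
        // Unknown backend, or the backend-specific Create() returned nullptr.
    }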
diff --git a/src/gpu/mock/GrMockGpu.h b/src/gpu/mock/GrMockGpu.h
new file mode 100644
index 0000000000..eb0a74a7f3
--- /dev/null
+++ b/src/gpu/mock/GrMockGpu.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2017 Google Inc.
+ *
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include "GrGpu.h"
+#include "GrSemaphore.h"
+
+class GrPipeline;
+
+class GrMockCaps : public GrCaps {
+public:
+ explicit GrMockCaps(const GrContextOptions& options) : INHERITED(options) {
+ fBufferMapThreshold = SK_MaxS32;
+ }
+ bool isConfigTexturable(GrPixelConfig) const override { return false; }
+ bool isConfigRenderable(GrPixelConfig config, bool withMSAA) const override { return false; }
+ bool canConfigBeImageStorage(GrPixelConfig) const override { return false; }
+ bool initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
+ bool* rectsMustMatch, bool* disallowSubrect) const override {
+ return false;
+ }
+
+private:
+ typedef GrCaps INHERITED;
+};
+
+class GrMockGpu : public GrGpu {
+public:
+ static GrGpu* Create(GrBackendContext backendContext, const GrContextOptions& options,
+ GrContext* context) {
+ SkASSERT((void*)backendContext == nullptr);
+ return new GrMockGpu(context, options);
+ }
+
+ ~GrMockGpu() override {}
+
+ bool onGetReadPixelsInfo(GrSurface* srcSurface, int readWidth, int readHeight, size_t rowBytes,
+ GrPixelConfig readConfig, DrawPreference*,
+ ReadPixelTempDrawInfo*) override { return false; }
+
+ bool onGetWritePixelsInfo(GrSurface* dstSurface, int width, int height,
+ GrPixelConfig srcConfig, DrawPreference*,
+ WritePixelTempDrawInfo*) override { return false; }
+
+ bool onCopySurface(GrSurface* dst,
+ GrSurface* src,
+ const SkIRect& srcRect,
+ const SkIPoint& dstPoint) override { return false; }
+
+ void onQueryMultisampleSpecs(GrRenderTarget* rt, const GrStencilSettings&,
+ int* effectiveSampleCnt, SamplePattern*) override {
+ *effectiveSampleCnt = rt->numStencilSamples();
+ }
+
+ GrGpuCommandBuffer* createCommandBuffer(const GrGpuCommandBuffer::LoadAndStoreInfo&,
+ const GrGpuCommandBuffer::LoadAndStoreInfo&) override {
+ return nullptr;
+ }
+
+ GrFence SK_WARN_UNUSED_RESULT insertFence() override { return 0; }
+ bool waitFence(GrFence, uint64_t) override { return true; }
+ void deleteFence(GrFence) const override {}
+
+ sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT makeSemaphore(bool isOwned) override {
+ return nullptr;
+ }
+ sk_sp<GrSemaphore> wrapBackendSemaphore(const GrBackendSemaphore& semaphore,
+ GrWrapOwnership ownership) override { return nullptr; }
+ void insertSemaphore(sk_sp<GrSemaphore> semaphore, bool flush) override {}
+ void waitSemaphore(sk_sp<GrSemaphore> semaphore) override {}
+ sk_sp<GrSemaphore> prepareTextureForCrossContextUsage(GrTexture*) override { return nullptr; }
+
+private:
+ GrMockGpu(GrContext* context, const GrContextOptions& options) : INHERITED(context) {
+ fCaps.reset(new GrMockCaps(options));
+ }
+
+ void onResetContext(uint32_t resetBits) override {}
+
+ void xferBarrier(GrRenderTarget*, GrXferBarrierType) override {}
+
+ sk_sp<GrTexture> onCreateTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted,
+ const SkTArray<GrMipLevel>& texels) override {
+ return nullptr;
+ }
+
+ sk_sp<GrTexture> onWrapBackendTexture(const GrBackendTexture&,
+ GrSurfaceOrigin,
+ GrBackendTextureFlags,
+ int sampleCnt,
+ GrWrapOwnership) override {
+ return nullptr;
+ }
+
+ sk_sp<GrRenderTarget> onWrapBackendRenderTarget(const GrBackendRenderTarget&,
+ GrSurfaceOrigin) override {
+ return nullptr;
+ }
+
+ sk_sp<GrRenderTarget> onWrapBackendTextureAsRenderTarget(const GrBackendTexture&,
+ GrSurfaceOrigin,
+ int sampleCnt) override {
+ return nullptr;
+ }
+
+ GrBuffer* onCreateBuffer(size_t, GrBufferType, GrAccessPattern, const void*) override {
+ return nullptr;
+ }
+
+ gr_instanced::InstancedRendering* onCreateInstancedRendering() override { return nullptr; }
+
+ bool onReadPixels(GrSurface* surface,
+ int left, int top, int width, int height,
+ GrPixelConfig,
+ void* buffer,
+ size_t rowBytes) override {
+ return false;
+ }
+
+ bool onWritePixels(GrSurface* surface,
+ int left, int top, int width, int height,
+ GrPixelConfig config, const SkTArray<GrMipLevel>& texels) override {
+ return false;
+ }
+
+ bool onTransferPixels(GrTexture* texture,
+ int left, int top, int width, int height,
+ GrPixelConfig config, GrBuffer* transferBuffer,
+ size_t offset, size_t rowBytes) override {
+ return false;
+ }
+
+ void onResolveRenderTarget(GrRenderTarget* target) override { return; }
+
+ GrStencilAttachment* createStencilAttachmentForRenderTarget(const GrRenderTarget*,
+ int width,
+ int height) override {
+ return nullptr;
+ }
+
+ void clearStencil(GrRenderTarget* target) override {}
+
+ GrBackendObject createTestingOnlyBackendTexture(void* pixels, int w, int h,
+ GrPixelConfig config, bool isRT) override {
+ return 0;
+ }
+ bool isTestingOnlyBackendTexture(GrBackendObject ) const override { return false; }
+ void deleteTestingOnlyBackendTexture(GrBackendObject, bool abandonTexture) override {}
+
+ typedef GrGpu INHERITED;
+};
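
For illustration only (not part of this change): a test-side sketch of creating a context backed by the stub GrMockGpu, assuming default GrContextOptions; GrMockGpu::Create() asserts that the backend context is null, so 0 is passed.

    GrContextOptions options;
    sk_sp<GrContext> ctx(GrContext::Create(kMock_GrBackend, (GrBackendContext)0, options));
    // ctx->caps() reports nothing texturable or renderable, so this context is only
    // useful for exercising code paths that never touch a real GPU.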