diff options
Diffstat (limited to 'src/gpu')
-rw-r--r-- | src/gpu/GrResourceProvider.cpp | 2 |
-rw-r--r-- | src/gpu/gl/GrGLCaps.cpp | 18 |
-rw-r--r-- | src/gpu/vk/GrVkTexture.cpp | 6 |
3 files changed, 14 insertions, 12 deletions
diff --git a/src/gpu/GrResourceProvider.cpp b/src/gpu/GrResourceProvider.cpp index 53b9b46d87..c75f298807 100644 --- a/src/gpu/GrResourceProvider.cpp +++ b/src/gpu/GrResourceProvider.cpp @@ -65,7 +65,7 @@ bool validate_desc(const GrSurfaceDesc& desc, const GrCaps& caps, int levelCount return false; } } - if (levelCount > 1 && GrPixelConfigIsSint(desc.fConfig)) { + if (levelCount > 1 && (GrPixelConfigIsSint(desc.fConfig) || !caps.mipMapSupport())) { return false; } return true; diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp index 521e2553f0..cb675bdb85 100644 --- a/src/gpu/gl/GrGLCaps.cpp +++ b/src/gpu/gl/GrGLCaps.cpp @@ -1571,14 +1571,16 @@ void GrGLCaps::initConfigTable(const GrContextOptions& contextOptions, fConfigTable[kBGRA_8888_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_BGRA; fConfigTable[kBGRA_8888_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_BGRA8; if (ctxInfo.hasExtension("GL_APPLE_texture_format_BGRA8888")) { - // The APPLE extension doesn't make this renderable. - fConfigTable[kBGRA_8888_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag; - if (version < GR_GL_VER(3,0) && !ctxInfo.hasExtension("GL_EXT_texture_storage")) { - // On ES2 the internal format of a BGRA texture is RGBA with the APPLE extension. - // Though, that seems to not be the case if the texture storage extension is - // present. The specs don't exactly make that clear. - fConfigTable[kBGRA_8888_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_RGBA; - fConfigTable[kBGRA_8888_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_RGBA8; + // This APPLE extension introduces complexity on ES2. It leaves the internal format + // as RGBA, but allows BGRA as the external format. From testing, it appears that the + // driver remembers the external format when the texture is created (with TexImage). + // If you then try to upload data in the other swizzle (with TexSubImage), it fails. 
+ // We could work around this, but it adds even more state tracking to code that is + // already too tricky. Instead, we opt not to support BGRA on ES2 with this extension. + // This also side-steps some ambiguous interactions with the texture storage extension. + if (version >= GR_GL_VER(3,0)) { + // The APPLE extension doesn't make this renderable. + fConfigTable[kBGRA_8888_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag; } } else if (ctxInfo.hasExtension("GL_EXT_texture_format_BGRA8888")) { fConfigTable[kBGRA_8888_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag | diff --git a/src/gpu/vk/GrVkTexture.cpp b/src/gpu/vk/GrVkTexture.cpp index 826f091ebe..1525df34a1 100644 --- a/src/gpu/vk/GrVkTexture.cpp +++ b/src/gpu/vk/GrVkTexture.cpp @@ -34,7 +34,7 @@ GrVkTexture::GrVkTexture(GrVkGpu* gpu, : GrSurface(gpu, desc) , GrVkImage(info, GrVkImage::kNot_Wrapped) , INHERITED(gpu, desc, kTexture2DSampler_GrSLType, highest_filter_mode(desc.fConfig), - desc.fIsMipMapped) + info.fLevelCount > 1) , fTextureView(view) , fLinearTextureView(nullptr) { this->registerWithCache(budgeted); @@ -49,7 +49,7 @@ GrVkTexture::GrVkTexture(GrVkGpu* gpu, : GrSurface(gpu, desc) , GrVkImage(info, wrapped) , INHERITED(gpu, desc, kTexture2DSampler_GrSLType, highest_filter_mode(desc.fConfig), - desc.fIsMipMapped) + info.fLevelCount > 1) , fTextureView(view) , fLinearTextureView(nullptr) { this->registerWithCacheWrapped(); @@ -64,7 +64,7 @@ GrVkTexture::GrVkTexture(GrVkGpu* gpu, : GrSurface(gpu, desc) , GrVkImage(info, wrapped) , INHERITED(gpu, desc, kTexture2DSampler_GrSLType, highest_filter_mode(desc.fConfig), - desc.fIsMipMapped) + info.fLevelCount > 1) , fTextureView(view) , fLinearTextureView(nullptr) { } |