diff options
author | 2016-03-21 06:55:37 -0700 | |
---|---|---|
committer | 2016-03-21 06:55:37 -0700 | |
commit | a6359365887048ef055196de75591311d7a015f0 (patch) | |
tree | 03d964f0818dc7ba10cba8a6d0c7f919c45b0804 /src/gpu/gl/GrGLCaps.cpp | |
parent | f8ee67edb8e5fed34bf4d5208bce89ea8be1ac1e (diff) |
sRGB support in Ganesh. Several pieces:
sRGB support now also requires GL_EXT_texture_sRGB_decode, which allows
us to disable sRGB -> Linear conversion when reading textures. This gives
us an easy way to support "legacy" L32 mode. We disable decoding based on
the pixel config of the render target. Textures can override that behavior
(specifically for format-conversion draws where we want that behavior).
Added sBGRA pixel config, which is not-really-a-format. It's just sRGBA
internally, and the external format is BGR order, so TexImage calls will
swizzle correctly. This lets us interact with sRGB raster surfaces on BGR
platforms.
Devices without sRGB support behave like they always have: conversion from
color type and profile type ignores sRGB and always returns linear pixel
configs.
BUG=skia:
GOLD_TRYBOT_URL= https://gold.skia.org/search2?unt=true&query=source_type%3Dgm&master=false&issue=1789663002
Review URL: https://codereview.chromium.org/1789663002
Diffstat (limited to 'src/gpu/gl/GrGLCaps.cpp')
-rw-r--r-- | src/gpu/gl/GrGLCaps.cpp | 43 |
1 file changed, 36 insertions, 7 deletions
diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp index 40885359fb..af31480ba7 100644 --- a/src/gpu/gl/GrGLCaps.cpp +++ b/src/gpu/gl/GrGLCaps.cpp @@ -1435,27 +1435,31 @@ void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterfa } fConfigTable[kBGRA_8888_GrPixelConfig].fSwizzle = GrSwizzle::RGBA(); - // We only enable srgb support if both textures and FBOs support srgb. - bool srgbSupport = false; + // We only enable srgb support if both textures and FBOs support srgb, + // *and* we can disable sRGB decode-on-read, to support "legacy" mode. if (kGL_GrGLStandard == standard) { if (ctxInfo.version() >= GR_GL_VER(3,0)) { - srgbSupport = true; + fSRGBSupport = true; } else if (ctxInfo.hasExtension("GL_EXT_texture_sRGB")) { if (ctxInfo.hasExtension("GL_ARB_framebuffer_sRGB") || ctxInfo.hasExtension("GL_EXT_framebuffer_sRGB")) { - srgbSupport = true; + fSRGBSupport = true; } } // All the above srgb extensions support toggling srgb writes - fSRGBWriteControl = srgbSupport; + fSRGBWriteControl = fSRGBSupport; } else { // See https://bug.skia.org/4148 for PowerVR issue. - srgbSupport = kPowerVRRogue_GrGLRenderer != ctxInfo.renderer() && + fSRGBSupport = kPowerVRRogue_GrGLRenderer != ctxInfo.renderer() && (ctxInfo.version() >= GR_GL_VER(3,0) || ctxInfo.hasExtension("GL_EXT_sRGB")); // ES through 3.1 requires EXT_srgb_write_control to support toggling // sRGB writing for destinations. fSRGBWriteControl = ctxInfo.hasExtension("GL_EXT_sRGB_write_control"); } + if (!ctxInfo.hasExtension("GL_EXT_texture_sRGB_decode")) { + // To support "legacy" L32 mode, we require the ability to turn off sRGB decode: + fSRGBSupport = false; + } fConfigTable[kSRGBA_8888_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_SRGB_ALPHA; fConfigTable[kSRGBA_8888_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_SRGB8_ALPHA8; // GL does not do srgb<->rgb conversions when transferring between cpu and gpu. 
Thus, the @@ -1464,7 +1468,7 @@ void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterfa GR_GL_RGBA; fConfigTable[kSRGBA_8888_GrPixelConfig].fFormats.fExternalType = GR_GL_UNSIGNED_BYTE; fConfigTable[kSRGBA_8888_GrPixelConfig].fFormatType = kNormalizedFixedPoint_FormatType; - if (srgbSupport) { + if (fSRGBSupport) { fConfigTable[kSRGBA_8888_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag | allRenderFlags; } @@ -1473,6 +1477,26 @@ void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterfa } fConfigTable[kSRGBA_8888_GrPixelConfig].fSwizzle = GrSwizzle::RGBA(); + // sBGRA is not a "real" thing in OpenGL, but GPUs support it, and on platforms where + // kN32 == BGRA, we need some way to work with it. (The default framebuffer on Windows + // is in this format, for example). + fConfigTable[kSBGRA_8888_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_SRGB_ALPHA; + fConfigTable[kSBGRA_8888_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_SRGB8_ALPHA8; + // GL does not do srgb<->rgb conversions when transferring between cpu and gpu. Thus, the + // external format is GL_BGRA. 
+ fConfigTable[kSBGRA_8888_GrPixelConfig].fFormats.fExternalFormat[kOther_ExternalFormatUsage] = + GR_GL_BGRA; + fConfigTable[kSBGRA_8888_GrPixelConfig].fFormats.fExternalType = GR_GL_UNSIGNED_BYTE; + fConfigTable[kSBGRA_8888_GrPixelConfig].fFormatType = kNormalizedFixedPoint_FormatType; + if (fSRGBSupport) { + fConfigTable[kSBGRA_8888_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag | + allRenderFlags; + } + if (texStorageSupported) { + fConfigTable[kSBGRA_8888_GrPixelConfig].fFlags |= ConfigInfo::kCanUseTexStorage_Flag; + } + fConfigTable[kSBGRA_8888_GrPixelConfig].fSwizzle = GrSwizzle::RGBA(); + fConfigTable[kRGB_565_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_RGB; if (this->ES2CompatibilitySupport()) { fConfigTable[kRGB_565_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_RGB565; @@ -1792,6 +1816,11 @@ void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterfa if (ctxInfo.standard() == kGLES_GrGLStandard && ctxInfo.version() == GR_GL_VER(2,0)) { fConfigTable[kSRGBA_8888_GrPixelConfig].fFormats.fExternalFormat[kTexImage_ExternalFormatUsage] = GR_GL_SRGB_ALPHA; + + // Additionally, because we had to "invent" sBGRA, there is no way to make it work + // in ES 2.0, because there is no <internalFormat> we can use. So just make that format + // unsupported. (If we have no sRGB support at all, this will get overwritten below). + fConfigTable[kSBGRA_8888_GrPixelConfig].fFlags = 0; } // If BGRA is supported as an internal format it must always be specified to glTex[Sub]Image |