author    Brian Salomon <bsalomon@google.com>    2016-11-11 16:08:03 -0500
committer Skia Commit-Bot <skia-commit-bot@chromium.org>    2016-11-11 21:58:52 +0000
commit    bf7b620b1e44985b164a8bd68031a7613fe0bb9b (patch)
tree      39c26fc481302f134d6b9079491dcbb59eecb6bf /src/gpu/gl/GrGLCaps.cpp
parent    06dd5c77a6ca6c484c683f1006a7bb6742f8917e (diff)
Revert "Revert "Add integer texture support.""
This reverts commit 9c7edb8311409a141b0dc1d5e480c68c629f1997.

Fixes ASAN errors.

GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=4736

Change-Id: I1b1dae754d357b01da7169c8e7c59d7d8d8a10f6
Reviewed-on: https://skia-review.googlesource.com/4736
Reviewed-by: Brian Salomon <bsalomon@google.com>
Commit-Queue: Brian Salomon <bsalomon@google.com>
Diffstat (limited to 'src/gpu/gl/GrGLCaps.cpp')
-rw-r--r--    src/gpu/gl/GrGLCaps.cpp    62
1 file changed, 51 insertions(+), 11 deletions(-)
diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp
index a37d0e093f..ed3c38180e 100644
--- a/src/gpu/gl/GrGLCaps.cpp
+++ b/src/gpu/gl/GrGLCaps.cpp
@@ -863,6 +863,10 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig surfaceConfig,
return false;
}
+ if (GrPixelConfigIsSint(surfaceConfig) != GrPixelConfigIsSint(readConfig)) {
+ return false;
+ }
+
GrGLenum readFormat;
GrGLenum readType;
if (!this->getReadPixelsFormat(surfaceConfig, readConfig, &readFormat, &readType)) {
@@ -874,8 +878,11 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig surfaceConfig,
// the manual (https://www.opengl.org/sdk/docs/man/) says only these formats are allowed:
// GL_STENCIL_INDEX, GL_DEPTH_COMPONENT, GL_DEPTH_STENCIL, GL_RED, GL_GREEN, GL_BLUE,
// GL_RGB, GL_BGR, GL_RGBA, and GL_BGRA. We check for the subset that we would use.
+ // The manual does not seem to fully match the spec as the spec allows integer formats
+ // when the bound color buffer is an integer buffer. It doesn't specify which integer
+ // formats are allowed, so perhaps all of them are. We only use GL_RGBA_INTEGER currently.
if (readFormat != GR_GL_RED && readFormat != GR_GL_RGB && readFormat != GR_GL_RGBA &&
- readFormat != GR_GL_BGRA) {
+ readFormat != GR_GL_BGRA && readFormat != GR_GL_RGBA_INTEGER) {
return false;
}
// There is also a set of allowed types, but all the types we use are in the set:
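For context (not part of this change): the integer-format path that the check above whitelists corresponds to a raw GL readback like the sketch below, assuming an ES 3.0+ context with a signed-integer (e.g. GL_RGBA8I) color attachment bound; the helper name and use of std::vector are illustrative only.

    #include <GLES3/gl3.h>
    #include <vector>

    // Minimal sketch: reading back an integer color buffer. The spec only permits
    // *_INTEGER external formats here; GL_RGBA_INTEGER + GL_INT is the pair Skia uses.
    std::vector<GLint> ReadBackRGBA8I(int width, int height) {
        std::vector<GLint> pixels(4 * width * height);
        glReadPixels(0, 0, width, height, GL_RGBA_INTEGER, GL_INT, pixels.data());
        return pixels;
    }
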
@@ -890,16 +897,22 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig surfaceConfig,
}
// See Section 16.1.2 in the ES 3.2 specification.
-
- if (kNormalizedFixedPoint_FormatType == fConfigTable[surfaceConfig].fFormatType) {
- if (GR_GL_RGBA == readFormat && GR_GL_UNSIGNED_BYTE == readType) {
- return true;
- }
- } else {
- SkASSERT(kFloat_FormatType == fConfigTable[surfaceConfig].fFormatType);
- if (GR_GL_RGBA == readFormat && GR_GL_FLOAT == readType) {
- return true;
- }
+ switch (fConfigTable[surfaceConfig].fFormatType) {
+ case kNormalizedFixedPoint_FormatType:
+ if (GR_GL_RGBA == readFormat && GR_GL_UNSIGNED_BYTE == readType) {
+ return true;
+ }
+ break;
+ case kInteger_FormatType:
+ if (GR_GL_RGBA_INTEGER == readFormat && GR_GL_INT == readType) {
+ return true;
+ }
+ break;
+ case kFloat_FormatType:
+ if (GR_GL_RGBA == readFormat && GR_GL_FLOAT == readType) {
+ return true;
+ }
+ break;
}
if (0 == fConfigTable[surfaceConfig].fSecondReadPixelsFormat.fFormat) {
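Aside (not part of this change): ES 3.x guarantees only two glReadPixels format/type combinations per framebuffer, the fixed one handled by the switch above and one implementation-chosen pair. The implementation-chosen pair is queried as sketched below; whether and how this caps code consults that query is outside this hunk.

    #include <GLES3/gl3.h>

    // Minimal sketch: querying the extra format/type combination the implementation
    // supports for the currently bound framebuffer.
    void QuerySecondaryReadFormat(GLint* format, GLint* type) {
        glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, format);
        glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, type);
    }
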
@@ -1569,6 +1582,33 @@ void GrGLCaps::initConfigTable(const GrGLContextInfo& ctxInfo, const GrGLInterfa
}
fConfigTable[kSBGRA_8888_GrPixelConfig].fSwizzle = GrSwizzle::RGBA();
+ bool hasIntegerTextures;
+ if (standard == kGL_GrGLStandard) {
+ hasIntegerTextures = version >= GR_GL_VER(3, 0) ||
+ ctxInfo.hasExtension("GL_EXT_texture_integer");
+ } else {
+ hasIntegerTextures = (version >= GR_GL_VER(3, 0));
+ }
+ // We may have limited GLSL to an earlier version that doesn't have integer sampler types.
+ if (ctxInfo.glslGeneration() == k110_GrGLSLGeneration) {
+ hasIntegerTextures = false;
+ }
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_RGBA_INTEGER;
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_RGBA8I;
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFormats.fExternalFormat[kOther_ExternalFormatUsage] = GR_GL_RGBA_INTEGER;
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFormats.fExternalType = GR_GL_BYTE;
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFormatType = kInteger_FormatType;
+ // We currently only support using integer textures as srcs, not for rendering (even though GL
+ // allows it).
+ if (hasIntegerTextures) {
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFlags = ConfigInfo::kTextureable_Flag |
+ ConfigInfo::kFBOColorAttachment_Flag;
+ if (texStorageSupported) {
+ fConfigTable[kRGBA_8888_sint_GrPixelConfig].fFlags |=
+ ConfigInfo::kCanUseTexStorage_Flag;
+ }
+ }
+
fConfigTable[kRGB_565_GrPixelConfig].fFormats.fBaseInternalFormat = GR_GL_RGB;
if (this->ES2CompatibilitySupport()) {
fConfigTable[kRGB_565_GrPixelConfig].fFormats.fSizedInternalFormat = GR_GL_RGB565;
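
For context (not part of this change): the kRGBA_8888_sint entries above describe a texture that would be allocated in raw GL roughly as sketched below, assuming an ES 3.0+ context; the helper is illustrative, and the glTexStorage2D branch corresponds to the kCanUseTexStorage_Flag path.

    #include <GLES3/gl3.h>

    // Minimal sketch: allocating the RGBA 8-bit signed-integer texture described by the
    // config table entries (sized format GL_RGBA8I, external GL_RGBA_INTEGER / GL_BYTE).
    GLuint MakeRGBA8ITexture(int width, int height, const GLbyte* pixels, bool useTexStorage) {
        GLuint tex = 0;
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);
        // Integer textures cannot be filtered; nearest sampling is required.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        if (useTexStorage) {
            // Immutable storage path (kCanUseTexStorage_Flag): sized internal format only.
            glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA8I, width, height);
            if (pixels) {
                glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
                                GL_RGBA_INTEGER, GL_BYTE, pixels);
            }
        } else {
            // Mutable path, matching fBaseInternalFormat / fExternalType in the table.
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8I, width, height, 0,
                         GL_RGBA_INTEGER, GL_BYTE, pixels);
        }
        return tex;
    }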