opengl: use GL_UNSIGNED_BYTE instead of GL_UNSIGNED_INT_8_8_8_8_REV.

This seems to be significantly more efficient on some modern platforms, but if
this turns out to be a widespread disaster, we can revert it.
This commit is contained in:
Ryan C. Gordon 2025-01-07 16:08:56 -05:00
parent 0176a19aee
commit 6934c910b3
No known key found for this signature in database
GPG key ID: FA148B892AB48044

View file

@@ -410,13 +410,13 @@ static bool convert_format(Uint32 pixel_format, GLint *internalFormat, GLenum *format,
 case SDL_PIXELFORMAT_XRGB8888:
     *internalFormat = GL_RGBA8;
     *format = GL_BGRA;
-    *type = GL_UNSIGNED_INT_8_8_8_8_REV;
+    *type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
     break;
 case SDL_PIXELFORMAT_ABGR8888:
 case SDL_PIXELFORMAT_XBGR8888:
     *internalFormat = GL_RGBA8;
     *format = GL_RGBA;
-    *type = GL_UNSIGNED_INT_8_8_8_8_REV;
+    *type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
     break;
 case SDL_PIXELFORMAT_YV12:
 case SDL_PIXELFORMAT_IYUV: