egl: favor truecolor configurations.

If the app requested <= 16-bit color depth and a 24-bit config is available,
favor that. This fixes things that quietly expect to get truecolor output
but don't request it (like SDL's render API), and things that are
probably requesting 16-bit color as a fallback but expecting reasonable
systems to give them full depth.

Specifically, this fixes Life is Strange on Wayland, which uses the latter
approach, and anything using SDL_Render on Wayland, which uses the former.

Fixes #4056.
Fixes #4132.
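
For context: the "latter approach" is an app explicitly asking for a 16-bit
format as a conservative fallback via SDL_GL_SetAttribute, while SDL_Render
never sets the channel sizes at all. A minimal sketch of the fallback pattern
(ordinary SDL2 calls; the specific 5/6/5 sizes and window parameters are
illustrative):

    /* Ask for 5/6/5 (16-bit) color as a lowest-common-denominator request.
       Before this commit, the EGL backend could honor this literally and
       return a 16-bit config even when a 24-bit one was available. */
    SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 6);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
    SDL_Window *window = SDL_CreateWindow("game", SDL_WINDOWPOS_CENTERED,
                                          SDL_WINDOWPOS_CENTERED,
                                          640, 480, SDL_WINDOW_OPENGL);
    SDL_GLContext context = SDL_GL_CreateContext(window);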
Ryan C. Gordon 2021-04-03 18:15:50 -04:00
parent 95e5f0586e
commit 354cabd4a7
1 changed file with 44 additions and 9 deletions

@@ -700,7 +700,8 @@ SDL_EGL_ChooseConfig(_THIS)
     /* 128 seems even nicer here */
     EGLConfig configs[128];
     SDL_bool has_matching_format = SDL_FALSE;
-    int i, j, best_bitdiff = -1, bitdiff;
+    int i, j, best_bitdiff = -1, best_truecolor_bitdiff = -1;
+    int truecolor_config_idx = -1;
 
     if (!_this->egl_data) {
         /* The EGL library wasn't loaded, SDL_GetError() should have info */
@@ -804,17 +805,30 @@ SDL_EGL_ChooseConfig(_THIS)
     /* From those, we select the one that matches our requirements more closely via a makeshift algorithm */
 
     for (i = 0; i < found_configs; i++ ) {
-        if (has_matching_format && _this->egl_data->egl_required_visual_id)
-        {
+        SDL_bool is_truecolor = SDL_FALSE;
+        int bitdiff = 0;
+
+        if (has_matching_format && _this->egl_data->egl_required_visual_id) {
             EGLint format;
             _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display,
                                             configs[i],
                                             EGL_NATIVE_VISUAL_ID, &format);
-            if (_this->egl_data->egl_required_visual_id != format)
+            if (_this->egl_data->egl_required_visual_id != format) {
                 continue;
+            }
+        }
+
+        _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display, configs[i], EGL_RED_SIZE, &value);
+        if (value == 8) {
+            _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display, configs[i], EGL_GREEN_SIZE, &value);
+            if (value == 8) {
+                _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display, configs[i], EGL_BLUE_SIZE, &value);
+                if (value == 8) {
+                    is_truecolor = SDL_TRUE;
+                }
+            }
         }
 
-        bitdiff = 0;
         for (j = 0; j < SDL_arraysize(attribs) - 1; j += 2) {
             if (attribs[j] == EGL_NONE) {
                 break;
@@ -832,17 +846,38 @@ SDL_EGL_ChooseConfig(_THIS)
             }
         }
 
-        if (bitdiff < best_bitdiff || best_bitdiff == -1) {
+        if ((bitdiff < best_bitdiff) || (best_bitdiff == -1)) {
             _this->egl_data->egl_config = configs[i];
-
             best_bitdiff = bitdiff;
         }
 
-        if (bitdiff == 0) {
-            break; /* we found an exact match! */
+        if (is_truecolor && ((bitdiff < best_truecolor_bitdiff) || (best_truecolor_bitdiff == -1))) {
+            truecolor_config_idx = i;
+            best_truecolor_bitdiff = bitdiff;
         }
     }
 
+    #define FAVOR_TRUECOLOR 1
+
+    #if FAVOR_TRUECOLOR
+    /* Some apps request a low color depth, either because they _assume_
+       they'll get a larger one but don't want to fail if only smaller ones
+       are available, or they just never called SDL_GL_SetAttribute at all and
+       got a tiny default. For these cases, a game that would otherwise run
+       at 24-bit color might get dithered down to something smaller, which is
+       worth avoiding. If the app requested <= 16 bit color and an exact 24-bit
+       match is available, favor that. Otherwise, we look for the closest
+       match. Note that while the API promises what you request _or better_,
+       it's feasible this can be disastrous for performance for custom software
+       on small hardware that all expected to actually get 16-bit color. In this
+       case, turn off FAVOR_TRUECOLOR (and maybe send a patch to make this more
+       flexible). */
+    if ( ((_this->gl_config.red_size + _this->gl_config.blue_size + _this->gl_config.green_size) <= 16) ) {
+        if (truecolor_config_idx != -1) {
+            _this->egl_data->egl_config = configs[truecolor_config_idx];
+        }
+    }
+    #endif
+
     #ifdef DUMP_EGL_CONFIG
     dumpconfig(_this, _this->egl_data->egl_config);
     #endif
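
Since GL attribute requests promise what you ask for or better, an app can
confirm the depth it actually received after context creation. A minimal
sketch, assuming a window and current GL context already exist (standard
SDL2 API):

    /* Query the channel sizes actually granted. With this change, an EGL
       app that requested <= 16-bit color should typically see 8/8/8 here
       whenever a truecolor config exists. */
    int r = 0, g = 0, b = 0;
    SDL_GL_GetAttribute(SDL_GL_RED_SIZE, &r);
    SDL_GL_GetAttribute(SDL_GL_GREEN_SIZE, &g);
    SDL_GL_GetAttribute(SDL_GL_BLUE_SIZE, &b);
    SDL_Log("Got %d/%d/%d color.", r, g, b);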