diff --git a/include/SDL_hints.h b/include/SDL_hints.h
index aff3602ba..d882d3af4 100644
--- a/include/SDL_hints.h
+++ b/include/SDL_hints.h
@@ -148,7 +148,7 @@ extern "C" {
  *    "0"       - Disable XRandR
  *    "1"       - Enable XRandR
  *
- *  By default SDL will use XRandR if it is available.
+ *  By default SDL will not use XRandR because of window manager issues.
  */
 #define SDL_HINT_VIDEO_X11_XRANDR           "SDL_VIDEO_X11_XRANDR"
 
diff --git a/src/video/x11/SDL_x11modes.c b/src/video/x11/SDL_x11modes.c
index 49c3451fc..4715476b5 100644
--- a/src/video/x11/SDL_x11modes.c
+++ b/src/video/x11/SDL_x11modes.c
@@ -25,7 +25,17 @@
 #include "SDL_hints.h"
 #include "SDL_x11video.h"
 
-/*#define X11MODES_DEBUG*/
+#define X11MODES_DEBUG
+
+/* I'm becoming more and more convinced that the application should never use XRandR,
+ * and that it's the window manager's responsibility to track and manage display modes
+ * for fullscreen windows.  Right now XRandR is completely broken with respect to
+ * window manager behavior on every window manager that I can find.  For example, on
+ * Unity 3D, if you show a fullscreen window while the resolution is changing (within
+ * ~250 ms), your window will retain the fullscreen state hint but be decorated and windowed.
+ */
+#define XRANDR_DISABLED_BY_DEFAULT
+
 
 static int
 get_visualinfo(Display * display, int screen, XVisualInfo * vinfo)
@@ -190,12 +200,21 @@ CheckXRandR(Display * display, int *major, int *minor)
 
     /* Allow environment override */
     env = SDL_GetHint(SDL_HINT_VIDEO_X11_XRANDR);
+#ifdef XRANDR_DISABLED_BY_DEFAULT
+    if (!env || !SDL_atoi(env)) {
+#ifdef X11MODES_DEBUG
+        printf("XRandR disabled by default due to window manager issues\n");
+#endif
+        return SDL_FALSE;
+    }
+#else
     if (env && !SDL_atoi(env)) {
 #ifdef X11MODES_DEBUG
         printf("XRandR disabled due to hint\n");
 #endif
         return SDL_FALSE;
     }
+#endif /* XRANDR_DISABLED_BY_DEFAULT */
 
     if (!SDL_X11_HAVE_XRANDR) {
 #ifdef X11MODES_DEBUG
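
With this change the default flips, so an application that still wants SDL to drive
mode switches through XRandR has to opt in explicitly. Below is a minimal sketch of
that opt-in, not part of this patch: SDL_SetHint() and SDL_HINT_VIDEO_X11_XRANDR are
SDL2's real public API, while the rest is placeholder application code.

#include "SDL.h"

int main(int argc, char *argv[])
{
    /* Hints read during video init must be set before SDL_Init(SDL_INIT_VIDEO);
     * CheckXRandR() calls SDL_GetHint() while the X11 video driver starts up. */
    SDL_SetHint(SDL_HINT_VIDEO_X11_XRANDR, "1");

    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
        SDL_Log("SDL_Init failed: %s", SDL_GetError());
        return 1;
    }

    /* ... create windows, request fullscreen display modes, etc. ... */

    SDL_Quit();
    return 0;
}

Setting SDL_VIDEO_X11_XRANDR=1 in the environment achieves the same thing without a
rebuild, and an environment setting takes precedence over a normal-priority
SDL_SetHint().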