Move more GPU feature checks upfront. Re-check when exiting settings menu.
This commit is contained in:
parent
a71be3f75f
commit
f524049277
11 changed files with 70 additions and 34 deletions
|
@ -253,7 +253,7 @@ ReplaceBlendType ReplaceBlendWithShader(bool allowShaderBlend) {
|
|||
|
||||
case GE_BLENDMODE_MIN:
|
||||
case GE_BLENDMODE_MAX:
|
||||
if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3) {
|
||||
if (gstate_c.Supports(GPU_SUPPORTS_BLEND_MINMAX)) {
|
||||
return REPLACE_BLEND_STANDARD;
|
||||
} else {
|
||||
return !allowShaderBlend ? REPLACE_BLEND_STANDARD : REPLACE_BLEND_COPY_FBO;
|
||||
|
|
|
@ -1296,7 +1296,7 @@ void FramebufferManager::BlitFramebuffer(VirtualFramebuffer *dst, int dstX, int
|
|||
useBlit = true;
|
||||
}
|
||||
#else
|
||||
if (gl_extensions.GLES3 || (gstate_c.Supports(GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT))) {
|
||||
if (gl_extensions.GLES3 || gstate_c.Supports(GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT)) {
|
||||
useNV = !gl_extensions.GLES3;
|
||||
useBlit = true;
|
||||
}
|
||||
|
|
|
@ -145,6 +145,8 @@ private:
|
|||
|
||||
void SetNumExtraFBOs(int num);
|
||||
|
||||
inline bool ShouldDownloadUsingCPU(const VirtualFramebuffer *vfb) const;
|
||||
|
||||
#ifndef USING_GLES2
|
||||
void PackFramebufferAsync_(VirtualFramebuffer *vfb);
|
||||
#endif
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
|
||||
#include "Common/ChunkFile.h"
|
||||
|
||||
#include "Core/Config.h"
|
||||
#include "Core/Debugger/Breakpoints.h"
|
||||
#include "Core/MemMapHelpers.h"
|
||||
#include "Core/Host.h"
|
||||
|
@ -31,6 +32,7 @@
|
|||
#include "GPU/GPUState.h"
|
||||
#include "GPU/ge_constants.h"
|
||||
#include "GPU/GeDisasm.h"
|
||||
#include "GPU/Common/FramebufferCommon.h"
|
||||
|
||||
#include "GPU/GLES/ShaderManager.h"
|
||||
#include "GPU/GLES/GLES_GPU.h"
|
||||
|
@ -470,13 +472,14 @@ void GLES_GPU::CheckGPUFeatures() {
|
|||
if (gl_extensions.ARB_blend_func_extended /*|| gl_extensions.EXT_blend_func_extended*/)
|
||||
features |= GPU_SUPPORTS_DUALSOURCE_BLEND;
|
||||
|
||||
#ifdef USING_GLES2
|
||||
if (gl_extensions.IsGLES) {
|
||||
if (gl_extensions.GLES3)
|
||||
features |= GPU_SUPPORTS_GLSL_ES_300;
|
||||
#else
|
||||
} else {
|
||||
if (gl_extensions.VersionGEThan(3, 3, 0))
|
||||
features |= GPU_SUPPORTS_GLSL_330;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Framebuffer fetch appears to be buggy at least on Tegra 3 devices. So we blacklist it.
|
||||
// Tales of Destiny 2 has been reported to display green.
|
||||
if (gl_extensions.EXT_shader_framebuffer_fetch || gl_extensions.NV_shader_framebuffer_fetch || gl_extensions.ARM_shader_framebuffer_fetch) {
|
||||
|
@ -491,8 +494,12 @@ void GLES_GPU::CheckGPUFeatures() {
|
|||
features |= GPU_SUPPORTS_FBO_ARB;
|
||||
}
|
||||
|
||||
bool useCPU = false;
|
||||
if (!gl_extensions.IsGLES) {
|
||||
// Urrgh, we don't even define FB_READFBOMEMORY_CPU on mobile
|
||||
#ifndef USING_GLES2
|
||||
bool useCPU = g_Config.iRenderingMode == FB_READFBOMEMORY_CPU;
|
||||
useCPU = g_Config.iRenderingMode == FB_READFBOMEMORY_CPU;
|
||||
#endif
|
||||
// We might get here if hackForce04154000Download_ is hit.
|
||||
// Some cards or drivers seem to always dither when downloading a framebuffer to 16-bit.
|
||||
// This causes glitches in games that expect the exact values.
|
||||
|
@ -502,16 +509,30 @@ void GLES_GPU::CheckGPUFeatures() {
|
|||
useCPU = true;
|
||||
}
|
||||
}
|
||||
#else
|
||||
} else {
|
||||
useCPU = true;
|
||||
#endif
|
||||
if (useCPU) {
|
||||
features |= GPU_PREFER_CPU_DOWNLOAD;
|
||||
}
|
||||
|
||||
if (useCPU)
|
||||
features |= GPU_PREFER_CPU_DOWNLOAD;
|
||||
|
||||
if ((gl_extensions.gpuVendor == GPU_VENDOR_NVIDIA) || (gl_extensions.gpuVendor == GPU_VENDOR_AMD))
|
||||
features |= GPU_PREFER_REVERSE_COLOR_ORDER;
|
||||
|
||||
if (gl_extensions.OES_texture_npot)
|
||||
features |= GPU_SUPPORTS_OES_TEXTURE_NPOT;
|
||||
|
||||
if (gl_extensions.EXT_unpack_subimage || !gl_extensions.IsGLES)
|
||||
features |= GPU_SUPPORTS_UNPACK_SUBIMAGE;
|
||||
|
||||
if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3)
|
||||
features |= GPU_SUPPORTS_BLEND_MINMAX;
|
||||
|
||||
#ifdef MOBILE_DEVICE
|
||||
// Arguably, we should turn off GPU_IS_MOBILE on like modern Tegras, etc.
|
||||
features |= GPU_IS_MOBILE;
|
||||
#endif
|
||||
|
||||
gstate_c.featureFlags = features;
|
||||
}
|
||||
|
||||
|
|
|
@ -37,6 +37,7 @@ public:
|
|||
GLES_GPU();
|
||||
~GLES_GPU();
|
||||
|
||||
// This gets called on startup and when we get back from settings.
|
||||
void CheckGPUFeatures();
|
||||
|
||||
void InitClear() override;
|
||||
|
|
|
@ -567,7 +567,7 @@ void TransformDrawEngine::ApplyBlendState() {
|
|||
glstate.blendFuncSeparate.set(glBlendFuncA, glBlendFuncB, GL_ZERO, GL_ONE);
|
||||
}
|
||||
|
||||
if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3) {
|
||||
if (gstate_c.Supports(GPU_SUPPORTS_BLEND_MINMAX)) {
|
||||
glstate.blendEquationSeparate.set(eqLookup[blendFuncEq], alphaEq);
|
||||
} else {
|
||||
glstate.blendEquationSeparate.set(eqLookupNoMinMax[blendFuncEq], alphaEq);
|
||||
|
|
|
@ -66,6 +66,7 @@
|
|||
#define GL_UNPACK_ROW_LENGTH 0x0CF2
|
||||
#endif
|
||||
|
||||
// Hack!
|
||||
extern int g_iNumVideos;
|
||||
|
||||
TextureCache::TextureCache() : cacheSizeEstimate_(0), secondCacheSizeEstimate_(0), clearCacheNextFrame_(false), lowMemoryMode_(false), clutBuf_(NULL), clutMaxBytes_(0), texelsScaledThisFrame_(0) {
|
||||
|
@ -1446,14 +1447,15 @@ void TextureCache::SetTexture(bool force) {
|
|||
scaleFactor = (PSP_CoreParameter().renderWidth + 479) / 480;
|
||||
}
|
||||
|
||||
#ifndef MOBILE_DEVICE
|
||||
scaleFactor = std::min(gl_extensions.OES_texture_npot ? 5 : 4, scaleFactor);
|
||||
// Mobile devices don't get the higher scale factors, too expensive. Very rough way to decide though...
|
||||
if (!gstate_c.Supports(GPU_IS_MOBILE)) {
|
||||
scaleFactor = std::min(gstate_c.Supports(GPU_SUPPORTS_OES_TEXTURE_NPOT) ? 5 : 4, scaleFactor);
|
||||
if (!gl_extensions.OES_texture_npot && scaleFactor == 3) {
|
||||
scaleFactor = 2;
|
||||
}
|
||||
#else
|
||||
scaleFactor = std::min(gl_extensions.OES_texture_npot ? 3 : 2, scaleFactor);
|
||||
#endif
|
||||
} else {
|
||||
scaleFactor = std::min(gstate_c.Supports(GPU_SUPPORTS_OES_TEXTURE_NPOT) ? 3 : 2, scaleFactor);
|
||||
}
|
||||
} else {
|
||||
scaleFactor = g_Config.iTexScalingLevel;
|
||||
}
|
||||
|
@ -1698,7 +1700,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
|
|||
case GE_TFMT_8888:
|
||||
if (!swizzled) {
|
||||
// Special case: if we don't need to deal with packing, we don't need to copy.
|
||||
if ((g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) || w == bufw) {
|
||||
if ((g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) || w == bufw) {
|
||||
if (UseBGRA8888()) {
|
||||
tmpTexBuf32.resize(std::max(bufw, w) * h);
|
||||
finalBuf = tmpTexBuf32.data();
|
||||
|
@ -1791,7 +1793,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
|
|||
ERROR_LOG_REPORT(G3D, "NO finalbuf! Will crash!");
|
||||
}
|
||||
|
||||
if (!(g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) && w != bufw) {
|
||||
if (!(g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) && w != bufw) {
|
||||
int pixelSize;
|
||||
switch (dstFmt) {
|
||||
case GL_UNSIGNED_SHORT_4_4_4_4:
|
||||
|
@ -1803,6 +1805,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
|
|||
pixelSize = 4;
|
||||
break;
|
||||
}
|
||||
|
||||
// Need to rearrange the buffer to simulate GL_UNPACK_ROW_LENGTH etc.
|
||||
int inRowBytes = bufw * pixelSize;
|
||||
int outRowBytes = w * pixelSize;
|
||||
|
@ -1868,7 +1871,7 @@ void TextureCache::LoadTextureLevel(TexCacheEntry &entry, int level, bool replac
|
|||
gpuStats.numTexturesDecoded++;
|
||||
|
||||
// Can restore these and remove the fixup at the end of DecodeTextureLevel on desktop GL and GLES 3.
|
||||
if ((g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) && w != bufw) {
|
||||
if ((g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) && w != bufw) {
|
||||
glPixelStorei(GL_UNPACK_ROW_LENGTH, bufw);
|
||||
useUnpack = true;
|
||||
}
|
||||
|
|
|
@ -18,6 +18,8 @@ class GPUCommon : public GPUThreadEventQueue, public GPUDebugInterface {
|
|||
public:
|
||||
GPUCommon();
|
||||
virtual ~GPUCommon();
|
||||
|
||||
virtual void CheckGPUFeatures() override {}
|
||||
virtual void Reinitialize();
|
||||
|
||||
virtual void InterruptStart(int listid);
|
||||
|
|
|
@ -207,6 +207,8 @@ public:
|
|||
|
||||
static const int DisplayListMaxCount = 64;
|
||||
|
||||
virtual void CheckGPUFeatures() = 0;
|
||||
|
||||
// Initialization
|
||||
virtual void InitClear() = 0;
|
||||
virtual void Reinitialize() = 0;
|
||||
|
|
|
@ -448,9 +448,13 @@ enum {
|
|||
GPU_SUPPORTS_DUALSOURCE_BLEND = FLAG_BIT(0),
|
||||
GPU_SUPPORTS_GLSL_ES_300 = FLAG_BIT(1),
|
||||
GPU_SUPPORTS_GLSL_330 = FLAG_BIT(2),
|
||||
GPU_SUPPORTS_UNPACK_SUBIMAGE = FLAG_BIT(3),
|
||||
GPU_SUPPORTS_BLEND_MINMAX = FLAG_BIT(4),
|
||||
GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT = FLAG_BIT(10),
|
||||
GPU_SUPPORTS_ANY_FRAMEBUFFER_FETCH = FLAG_BIT(20),
|
||||
GPU_SUPPORTS_FBO_ARB = FLAG_BIT(25),
|
||||
GPU_SUPPORTS_OES_TEXTURE_NPOT = FLAG_BIT(26),
|
||||
GPU_IS_MOBILE = FLAG_BIT(29),
|
||||
GPU_PREFER_CPU_DOWNLOAD = FLAG_BIT(30),
|
||||
GPU_PREFER_REVERSE_COLOR_ORDER = FLAG_BIT(31),
|
||||
};
|
||||
|
|
|
@ -180,6 +180,7 @@ void EmuScreen::dialogFinished(const Screen *dialog, DialogResult result) {
|
|||
quit_ = false;
|
||||
}
|
||||
RecreateViews();
|
||||
gpu->CheckGPUFeatures();
|
||||
}
|
||||
|
||||
static void AfterStateLoad(bool success, void *ignored) {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue