Add a way to view the "GPU_USE_" flags at runtime. Useful for sanity checking on-device.
parent 7eee7f2573, commit f6fcc9e9a7
7 changed files with 88 additions and 1 deletion
@@ -367,3 +367,46 @@ void GPUStateCache::DoState(PointerWrap &p) {
 	Do(p, savedContextVersion);
 }
 }
+
+static const char *const gpuUseFlagNames[32] = {
+	"GPU_USE_DUALSOURCE_BLEND",
+	"GPU_USE_LIGHT_UBERSHADER",
+	"GPU_USE_FRAGMENT_TEST_CACHE",
+	"GPU_USE_VS_RANGE_CULLING",
+	"GPU_USE_BLEND_MINMAX",
+	"GPU_USE_LOGIC_OP",
+	"GPU_USE_DEPTH_RANGE_HACK",
+	"GPU_USE_TEXTURE_NPOT",
+	"GPU_USE_ANISOTROPY",
+	"GPU_USE_CLEAR_RAM_HACK",
+	"GPU_USE_INSTANCE_RENDERING",
+	"GPU_USE_VERTEX_TEXTURE_FETCH",
+	"GPU_USE_TEXTURE_FLOAT",
+	"GPU_USE_16BIT_FORMATS",
+	"GPU_USE_DEPTH_CLAMP",
+	"GPU_USE_TEXTURE_LOD_CONTROL",
+	"GPU_USE_DEPTH_TEXTURE",
+	"GPU_USE_ACCURATE_DEPTH",
+	"GPU_USE_GS_CULLING",
+	"GPU_USE_REVERSE_COLOR_ORDER",
+	"GPU_USE_FRAMEBUFFER_FETCH",
+	"GPU_SCALE_DEPTH_FROM_24BIT_TO_16BIT",
+	"GPU_ROUND_FRAGMENT_DEPTH_TO_16BIT",
+	"GPU_ROUND_DEPTH_TO_16BIT",
+	"GPU_USE_CLIP_DISTANCE",
+	"GPU_USE_CULL_DISTANCE",
+	"N/A", // bit 26
+	"N/A", // bit 27
+	"N/A", // bit 28
+	"GPU_USE_VIRTUAL_REALITY",
+	"GPU_USE_SINGLE_PASS_STEREO",
+	"GPU_USE_SIMPLE_STEREO_PERSPECTIVE",
+};
+
+const char *GpuUseFlagToString(int useFlag) {
+	if ((u32)useFlag < 32) {
+		return gpuUseFlagNames[useFlag];
+	} else {
+		return "N/A";
+	}
+}
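For on-device sanity checking, a caller can walk the 32-bit flags mask and emit the name of every set bit. Below is a minimal sketch of such a dump routine; FormatGpuUseFlags and its useFlags parameter are illustrative names, not part of this commit, and the declaration of GpuUseFlagToString mirrors the function added above.

#include <cstdint>
#include <string>

const char *GpuUseFlagToString(int useFlag);  // added in the diff above

// Builds a newline-separated list of the names of all set GPU_USE_ bits.
std::string FormatGpuUseFlags(uint32_t useFlags) {
	std::string result;
	for (int bit = 0; bit < 32; bit++) {
		if (useFlags & (1u << bit)) {
			result += GpuUseFlagToString(bit);
			result += '\n';
		}
	}
	return result;
}

Note that the table keeps "N/A" placeholders for the unused bits 26 through 28 so the array index stays equal to the bit position, and the (u32) cast in the bounds check makes negative inputs fall through to "N/A" as well.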