// Copyright (c) 2012- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include <algorithm>
#include <cstring>

#include "ext/xxhash.h"

#include "Common/File/VFS/VFS.h"
#include "Common/Data/Text/I18n.h"
#include "Common/Math/math_util.h"
#include "Common/Profiler/Profiler.h"

#include "Common/GPU/thin3d.h"
#include "Common/GPU/Vulkan/VulkanRenderManager.h"

#include "Common/Data/Convert/ColorConv.h"
#include "Common/StringUtils.h"
#include "Common/TimeUtil.h"

#include "Core/Config.h"
#include "Core/Host.h"
#include "Core/MemMap.h"
#include "Core/Reporting.h"
#include "Core/System.h"

#include "Common/GPU/Vulkan/VulkanContext.h"
#include "Common/GPU/Vulkan/VulkanImage.h"
#include "Common/GPU/Vulkan/VulkanMemory.h"

#include "GPU/ge_constants.h"
#include "GPU/GPUState.h"
#include "GPU/Common/TextureShaderCommon.h"
#include "GPU/Common/PostShader.h"
#include "GPU/Common/TextureCacheCommon.h"
#include "GPU/Common/TextureDecoder.h"
#include "GPU/Vulkan/VulkanContext.h"
#include "GPU/Vulkan/TextureCacheVulkan.h"
#include "GPU/Vulkan/FramebufferManagerVulkan.h"
#include "GPU/Vulkan/ShaderManagerVulkan.h"
#include "GPU/Vulkan/DrawEngineVulkan.h"

using namespace PPSSPP_VK;

#define TEXCACHE_MIN_SLAB_SIZE (8 * 1024 * 1024)
#define TEXCACHE_MAX_SLAB_SIZE (32 * 1024 * 1024)
#define TEXCACHE_SLAB_PRESSURE 4

const char *uploadShader = R"(
#version 450
#extension GL_ARB_separate_shader_objects : enable

// 8x8 is the most common compute shader workgroup size, and works great on all major
// hardware vendors.
layout (local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

uniform layout(binding = 0, rgba8) writeonly image2D img;

layout(std430, binding = 1) buffer Buf {
	uint data[];
} buf;

layout(push_constant) uniform Params {
	int width;
	int height;
} params;

uint readColoru(uvec2 p) {
	return buf.data[p.y * params.width + p.x];
}

vec4 readColorf(uvec2 p) {
	// Unpack the color (we could look it up in a CLUT here if we wanted...)
	// The imageStore repack is free.
	return unpackUnorm4x8(readColoru(p));
}

void writeColorf(ivec2 p, vec4 c) {
	imageStore(img, p, c);
}

%s

// Note that main runs once per INPUT pixel, unlike the old model.
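// applyScaling() is provided by the selected texture scaling shader: CompileScalingShader()
// splices that shader's source into the placeholder above before compiling this module.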
void main() {
	uvec2 xy = gl_GlobalInvocationID.xy;
	// Kill off any out-of-image threads to avoid stray writes.
	// Should only happen on the tiniest mipmaps as PSP textures are power-of-2,
	// and we use an 8x8 workgroup size. Probably not really necessary.
	if (xy.x >= params.width || xy.y >= params.height)
		return;

	// applyScaling will write the upscaled pixels, using writeColorf above.
	// It's expected to write a square of scale*scale pixels, at the location xy*scale.
	applyScaling(xy);
}

)";

SamplerCache::~SamplerCache() {
	DeviceLost();
}

VkSampler SamplerCache::GetOrCreateSampler(const SamplerCacheKey &key) {
	VkSampler sampler = cache_.Get(key);
	if (sampler != VK_NULL_HANDLE)
		return sampler;

	VkSamplerCreateInfo samp = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
	samp.addressModeU = key.sClamp ? VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE : VK_SAMPLER_ADDRESS_MODE_REPEAT;
	samp.addressModeV = key.tClamp ? VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE : VK_SAMPLER_ADDRESS_MODE_REPEAT;
	// W addressing is irrelevant for 2d textures, but Mali recommends that all clamp modes are the same if possible so just copy from U.
	samp.addressModeW = key.texture3d ? VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE : samp.addressModeU;
	samp.compareOp = VK_COMPARE_OP_ALWAYS;
	samp.flags = 0;
	samp.magFilter = key.magFilt ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
	samp.minFilter = key.minFilt ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
	samp.mipmapMode = key.mipFilt ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST;

	if (key.aniso) {
		// Docs say the min of this value and the supported max are used.
		samp.maxAnisotropy = 1 << g_Config.iAnisotropyLevel;
		samp.anisotropyEnable = true;
	} else {
		samp.maxAnisotropy = 1.0f;
		samp.anisotropyEnable = false;
	}

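	// The LOD fields in the key (maxLevel, minLevel, lodBias) carry 8 fractional bits,
	// so 256 == 1.0 mip level - hence the * (1.0f / 256.0f) conversions below.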
	if (key.maxLevel == 9 * 256) {
		// No max level needed.
		samp.maxLod = VK_LOD_CLAMP_NONE;
	} else {
		samp.maxLod = (float)(int32_t)key.maxLevel * (1.0f / 256.0f);
	}
	samp.minLod = (float)(int32_t)key.minLevel * (1.0f / 256.0f);
	samp.mipLodBias = (float)(int32_t)key.lodBias * (1.0f / 256.0f);

	VkResult res = vkCreateSampler(vulkan_->GetDevice(), &samp, nullptr, &sampler);
	_assert_(res == VK_SUCCESS);
	cache_.Insert(key, sampler);
	return sampler;
}

std::string SamplerCache::DebugGetSamplerString(std::string id, DebugShaderStringType stringType) {
	SamplerCacheKey key;
	key.FromString(id);
	return StringFromFormat("%s/%s mag:%s min:%s mip:%s maxLod:%f minLod:%f bias:%f",
		key.sClamp ? "Clamp" : "Wrap",
		key.tClamp ? "Clamp" : "Wrap",
		key.magFilt ? "Linear" : "Nearest",
		key.minFilt ? "Linear" : "Nearest",
		key.mipFilt ? "Linear" : "Nearest",
		key.maxLevel / 256.0f,
		key.minLevel / 256.0f,
		key.lodBias / 256.0f);
}

void SamplerCache::DeviceLost() {
	cache_.Iterate([&](const SamplerCacheKey &key, VkSampler sampler) {
		vulkan_->Delete().QueueDeleteSampler(sampler);
	});
	cache_.Clear();
	vulkan_ = nullptr;
}

void SamplerCache::DeviceRestore(VulkanContext *vulkan) {
	vulkan_ = vulkan;
}

std::vector<std::string> SamplerCache::DebugGetSamplerIDs() const {
	std::vector<std::string> ids;
	cache_.Iterate([&](const SamplerCacheKey &id, VkSampler sampler) {
		std::string idstr;
		id.ToString(&idstr);
		ids.push_back(idstr);
	});
	return ids;
}

TextureCacheVulkan::TextureCacheVulkan(Draw::DrawContext *draw, Draw2D *draw2D, VulkanContext *vulkan)
	: TextureCacheCommon(draw, draw2D),
		computeShaderManager_(vulkan),
		samplerCache_(vulkan) {
	DeviceRestore(draw);
}

TextureCacheVulkan::~TextureCacheVulkan() {
	DeviceLost();
}

void TextureCacheVulkan::SetFramebufferManager(FramebufferManagerVulkan *fbManager) {
	framebufferManager_ = fbManager;
}

void TextureCacheVulkan::DeviceLost() {
	textureShaderCache_->DeviceLost();

	VulkanContext *vulkan = draw_ ? (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT) : nullptr;

	Clear(true);
	samplerCache_.DeviceLost();

	if (samplerNearest_)
		vulkan->Delete().QueueDeleteSampler(samplerNearest_);

	if (uploadCS_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeleteShaderModule(uploadCS_);

	computeShaderManager_.DeviceLost();

	nextTexture_ = nullptr;
	draw_ = nullptr;
}

void TextureCacheVulkan::DeviceRestore(Draw::DrawContext *draw) {
	VulkanContext *vulkan = (VulkanContext *)draw->GetNativeObject(Draw::NativeObject::CONTEXT);
	draw_ = draw;

	_assert_(!allocator_);

	samplerCache_.DeviceRestore(vulkan);
	textureShaderCache_->DeviceRestore(draw);

	VkSamplerCreateInfo samp{ VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
	samp.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
	samp.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
	samp.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
	samp.magFilter = VK_FILTER_NEAREST;
	samp.minFilter = VK_FILTER_NEAREST;
	samp.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
	VkResult res = vkCreateSampler(vulkan->GetDevice(), &samp, nullptr, &samplerNearest_);
	_assert_(res == VK_SUCCESS);

	CompileScalingShader();

	computeShaderManager_.DeviceRestore(vulkan);
}

void TextureCacheVulkan::NotifyConfigChanged() {
	TextureCacheCommon::NotifyConfigChanged();
	CompileScalingShader();
}

static std::string ReadShaderSrc(const Path &filename) {
	size_t sz = 0;
	char *data = (char *)VFSReadFile(filename.c_str(), &sz);
	if (!data)
		return std::string();

	std::string src(data, sz);
	delete[] data;
	return src;
}

void TextureCacheVulkan::CompileScalingShader() {
	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);

	if (!g_Config.bTexHardwareScaling || g_Config.sTextureShaderName != textureShader_) {
		if (uploadCS_ != VK_NULL_HANDLE)
			vulkan->Delete().QueueDeleteShaderModule(uploadCS_);
		textureShader_.clear();
		shaderScaleFactor_ = 0;  // no texture scaling shader
	} else if (uploadCS_) {
		// No need to recreate.
		return;
	}

	if (!g_Config.bTexHardwareScaling)
		return;

	ReloadAllPostShaderInfo(draw_);
	const TextureShaderInfo *shaderInfo = GetTextureShaderInfo(g_Config.sTextureShaderName);
	if (!shaderInfo || shaderInfo->computeShaderFile.empty())
		return;

	std::string shaderSource = ReadShaderSrc(shaderInfo->computeShaderFile);
	std::string fullUploadShader = StringFromFormat(uploadShader, shaderSource.c_str());

	std::string error;
	uploadCS_ = CompileShaderModule(vulkan, VK_SHADER_STAGE_COMPUTE_BIT, fullUploadShader.c_str(), &error);
	_dbg_assert_msg_(uploadCS_ != VK_NULL_HANDLE, "failed to compile upload shader");

	textureShader_ = g_Config.sTextureShaderName;
	shaderScaleFactor_ = shaderInfo->scaleFactor;
}

void TextureCacheVulkan::ReleaseTexture(TexCacheEntry *entry, bool delete_them) {
	delete entry->vkTex;
	entry->vkTex = nullptr;
}

VkFormat getClutDestFormatVulkan(GEPaletteFormat format) {
	switch (format) {
	case GE_CMODE_16BIT_ABGR4444:
		return VULKAN_4444_FORMAT;
	case GE_CMODE_16BIT_ABGR5551:
		return VULKAN_1555_FORMAT;
	case GE_CMODE_16BIT_BGR5650:
		return VULKAN_565_FORMAT;
	case GE_CMODE_32BIT_ABGR8888:
		return VULKAN_8888_FORMAT;
	}
	return VK_FORMAT_UNDEFINED;
}

static const VkFilter MagFiltVK[2] = {
	VK_FILTER_NEAREST,
	VK_FILTER_LINEAR
};

void TextureCacheVulkan::StartFrame() {
	TextureCacheCommon::StartFrame();

	InvalidateLastTexture();
	textureShaderCache_->Decimate();

	timesInvalidatedAllThisFrame_ = 0;
	texelsScaledThisFrame_ = 0;
	replacementTimeThisFrame_ = 0.0;

	if (clearCacheNextFrame_) {
		Clear(true);
		clearCacheNextFrame_ = false;
	} else {
		int slabPressureLimit = TEXCACHE_SLAB_PRESSURE;
		if (g_Config.iTexScalingLevel > 1) {
			// Since textures are 2D maybe we should square this, but might get too non-aggressive.
			slabPressureLimit *= g_Config.iTexScalingLevel;
		}
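		// Note that slabPressureLimit isn't currently passed on to Decimate(); see the TODO below
		// about querying VMA for budget information instead.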
		// TODO: Use some indication from VMA.
		// Maybe see https://gpuopen-librariesandsdks.github.io/VulkanMemoryAllocator/html/staying_within_budget.html#staying_within_budget_querying_for_budget .
		Decimate(false);
	}

	computeShaderManager_.BeginFrame();
}

void TextureCacheVulkan::EndFrame() {
	computeShaderManager_.EndFrame();

	if (texelsScaledThisFrame_) {
		VERBOSE_LOG(G3D, "Scaled %i texels", texelsScaledThisFrame_);
	}
}

void TextureCacheVulkan::UpdateCurrentClut(GEPaletteFormat clutFormat, u32 clutBase, bool clutIndexIsSimple) {
	const u32 clutBaseBytes = clutFormat == GE_CMODE_32BIT_ABGR8888 ? (clutBase * sizeof(u32)) : (clutBase * sizeof(u16));
	// Technically, these extra bytes weren't loaded, but hopefully it was loaded earlier.
	// If not, we're going to hash random data, which hopefully doesn't cause a performance issue.
	//
	// TODO: Actually, this seems like a hack. The game can upload part of a CLUT and reference other data.
	// clutTotalBytes_ is the last amount uploaded. We should hash clutMaxBytes_, but this will often hash
	// unrelated old entries for small palettes.
	// Adding clutBaseBytes may just be mitigating this for some usage patterns.
	const u32 clutExtendedBytes = std::min(clutTotalBytes_ + clutBaseBytes, clutMaxBytes_);

	if (replacer_.Enabled())
		clutHash_ = XXH32((const char *)clutBufRaw_, clutExtendedBytes, 0xC0108888);
	else
		clutHash_ = XXH3_64bits((const char *)clutBufRaw_, clutExtendedBytes) & 0xFFFFFFFF;
	clutBuf_ = clutBufRaw_;

	// Special optimization: fonts typically draw clut4 with just alpha values in a single color.
	clutAlphaLinear_ = false;
	clutAlphaLinearColor_ = 0;
	if (clutFormat == GE_CMODE_16BIT_ABGR4444 && clutIndexIsSimple) {
		const u16_le *clut = GetCurrentClut<u16_le>();
		clutAlphaLinear_ = true;
		clutAlphaLinearColor_ = clut[15] & 0x0FFF;
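		// In 4444, alpha lives in the top nibble, so a linear alpha ramp means entry i should equal
		// the base color (taken from entry 15) with its alpha replaced by (i << 12).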
		for (int i = 0; i < 16; ++i) {
			u16 step = clutAlphaLinearColor_ | (i << 12);
			if (clut[i] != step) {
				clutAlphaLinear_ = false;
				break;
			}
		}
	}

	clutLastFormat_ = gstate.clutformat;
}

void TextureCacheVulkan::BindTexture(TexCacheEntry *entry) {
	if (!entry) {
		imageView_ = VK_NULL_HANDLE;
		curSampler_ = VK_NULL_HANDLE;
		return;
	}

	_dbg_assert_(entry->vkTex);

	entry->vkTex->Touch();

	int maxLevel = (entry->status & TexCacheEntry::STATUS_NO_MIPS) ? 0 : entry->maxLevel;
	SamplerCacheKey samplerKey = GetSamplingParams(maxLevel, entry);
	curSampler_ = samplerCache_.GetOrCreateSampler(samplerKey);
	imageView_ = entry->vkTex->GetImageView();

	drawEngine_->SetDepalTexture(VK_NULL_HANDLE, false);
	gstate_c.SetUseShaderDepal(false, false);
}

void TextureCacheVulkan::ApplySamplingParams(const SamplerCacheKey &key) {
	curSampler_ = samplerCache_.GetOrCreateSampler(key);
}

void TextureCacheVulkan::Unbind() {
	imageView_ = VK_NULL_HANDLE;
	curSampler_ = VK_NULL_HANDLE;
	InvalidateLastTexture();
}

void TextureCacheVulkan::BindAsClutTexture(Draw::Texture *tex, bool smooth) {
	VkImageView clutTexture = (VkImageView)draw_->GetNativeObject(Draw::NativeObject::TEXTURE_VIEW, tex);
	drawEngine_->SetDepalTexture(clutTexture, smooth);
}

static Draw::DataFormat FromVulkanFormat(VkFormat fmt) {
	switch (fmt) {
	case VULKAN_8888_FORMAT: default: return Draw::DataFormat::R8G8B8A8_UNORM;
	}
}

static VkFormat ToVulkanFormat(Draw::DataFormat fmt) {
	switch (fmt) {
	case Draw::DataFormat::R8G8B8A8_UNORM: default: return VULKAN_8888_FORMAT;
	}
}

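// BuildTexture decodes every mip level (or 3D slice) of the current texture - or loads replacement
// data for it - into a freshly created VulkanTexture, upscaling on the CPU or with the compute
// shader above when texture scaling is enabled, then generates any remaining mip levels on the GPU.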
void TextureCacheVulkan::BuildTexture(TexCacheEntry *const entry) {
	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);

	BuildTexturePlan plan;
	plan.hardwareScaling = g_Config.bTexHardwareScaling && uploadCS_ != VK_NULL_HANDLE;
	plan.slowScaler = !plan.hardwareScaling || vulkan->DevicePerfClass() == PerfClass::SLOW;
	if (!PrepareBuildTexture(plan, entry)) {
		// We're screwed?
		return;
	}

	VkFormat dstFmt = GetDestFormat(GETextureFormat(entry->format), gstate.getClutPaletteFormat());
	if (plan.scaleFactor > 1) {
		// Whether hardware or software scaling, this is the dest format.
		dstFmt = VULKAN_8888_FORMAT;
	}

	// We don't generate mipmaps for 512x512 textures because they're almost exclusively used for menu backgrounds
	// and similar, which don't really need it.
	if (g_Config.iTexFiltering == TEX_FILTER_AUTO_MAX_QUALITY && plan.w <= 256 && plan.h <= 256) {
		// Boost the number of mipmaps.
		if (plan.maxPossibleLevels > plan.levelsToCreate) {
			// We have to generate mips with a shader. This requires decoding to R8G8B8A8_UNORM format to avoid extra complications.
			dstFmt = VULKAN_8888_FORMAT;
		}
		plan.levelsToCreate = plan.maxPossibleLevels;
	}

	// Any texture scaling is gonna move away from the original 16-bit format, if any.
	VkFormat actualFmt = plan.scaleFactor > 1 ? VULKAN_8888_FORMAT : dstFmt;
	if (plan.replaceValid) {
		actualFmt = ToVulkanFormat(plan.replaced->Format(plan.baseLevelSrc));
	}

	bool computeUpload = false;
	VkCommandBuffer cmdInit = (VkCommandBuffer)draw_->GetNativeObject(Draw::NativeObject::INIT_COMMANDBUFFER);

	delete entry->vkTex;
	entry->vkTex = new VulkanTexture(vulkan);
	VulkanTexture *image = entry->vkTex;

	const VkComponentMapping *mapping;
	switch (actualFmt) {
	case VULKAN_4444_FORMAT: mapping = &VULKAN_4444_SWIZZLE; break;
	case VULKAN_1555_FORMAT: mapping = &VULKAN_1555_SWIZZLE; break;
	case VULKAN_565_FORMAT: mapping = &VULKAN_565_SWIZZLE; break;
	default: mapping = &VULKAN_8888_SWIZZLE; break;
	}

	VkImageLayout imageLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;

	if (actualFmt == VULKAN_8888_FORMAT && plan.scaleFactor > 1 && plan.hardwareScaling) {
		if (uploadCS_ != VK_NULL_HANDLE) {
			computeUpload = true;
		} else {
			WARN_LOG(G3D, "Falling back to software scaling, hardware shader didn't compile");
		}
	}

	if (computeUpload) {
		usage |= VK_IMAGE_USAGE_STORAGE_BIT;
		imageLayout = VK_IMAGE_LAYOUT_GENERAL;
	}

	if (plan.saveTexture) {
		actualFmt = VULKAN_8888_FORMAT;
	}

	char texName[128]{};
	snprintf(texName, sizeof(texName), "tex_%08x_%s", entry->addr, GeTextureFormatToString((GETextureFormat)entry->format, gstate.getClutPaletteFormat()));
	image->SetTag(texName);

	bool allocSuccess = image->CreateDirect(cmdInit, plan.createW, plan.createH, plan.depth, plan.levelsToCreate, actualFmt, imageLayout, usage, mapping);
	if (!allocSuccess && !lowMemoryMode_) {
		WARN_LOG_REPORT(G3D, "Texture cache ran out of GPU memory; switching to low memory mode");
		lowMemoryMode_ = true;
		decimationCounter_ = 0;
		Decimate();
		// TODO: We should stall the GPU here and wipe things out of memory.
		// As is, it will almost definitely fail the second time, but next frame it may recover.
		auto err = GetI18NCategory("Error");
		if (plan.scaleFactor > 1) {
			host->NotifyUserMessage(err->T("Warning: Video memory FULL, reducing upscaling and switching to slow caching mode"), 2.0f);
		} else {
			host->NotifyUserMessage(err->T("Warning: Video memory FULL, switching to slow caching mode"), 2.0f);
		}

		// Turn off texture replacement for this texture.
		plan.replaced = &replacer_.FindNone();

		plan.createW /= plan.scaleFactor;
		plan.createH /= plan.scaleFactor;
		plan.scaleFactor = 1;
		actualFmt = dstFmt;

		allocSuccess = image->CreateDirect(cmdInit, plan.createW, plan.createH, plan.depth, plan.levelsToCreate, actualFmt, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, mapping);
	}

	if (!allocSuccess) {
		ERROR_LOG(G3D, "Failed to create texture (%dx%d)", plan.w, plan.h);
		delete entry->vkTex;
		entry->vkTex = nullptr;
	}

	if (!entry->vkTex) {
		return;
	}

	VK_PROFILE_BEGIN(vulkan, cmdInit, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
		"Texture Upload (%08x) video=%d", entry->addr, plan.isVideo);

	// Upload the texture data. We simply reuse the same loop for 3D texture slices instead of mips, if we have those.
	int levels;
	if (plan.depth > 1) {
		levels = plan.depth;
	} else {
		levels = plan.levelsToLoad;
	}

	for (int i = 0; i < levels; i++) {
		int mipUnscaledWidth = gstate.getTextureWidth(i);
		int mipUnscaledHeight = gstate.getTextureHeight(i);

		int mipWidth;
		int mipHeight;
		plan.GetMipSize(i, &mipWidth, &mipHeight);

		int bpp = actualFmt == VULKAN_8888_FORMAT ? 4 : 2;  // output bpp
		int stride = (mipWidth * bpp + 15) & ~15;  // output stride
		int uploadSize = stride * mipHeight;

		uint32_t bufferOffset;
		VkBuffer texBuf;
		// NVIDIA reports a min alignment of 1 but that can't be healthy... let's align by 16 as a minimum.
		int pushAlignment = std::max(16, (int)vulkan->GetPhysicalDeviceProperties().properties.limits.optimalBufferCopyOffsetAlignment);
		void *data;
		std::vector<uint8_t> saveData;

		auto loadLevel = [&](int sz, int srcLevel, int lstride, int lfactor) {
			if (plan.saveTexture) {
				saveData.resize(sz);
				data = &saveData[0];
			} else {
				data = drawEngine_->GetPushBufferForTextureData()->PushAligned(sz, &bufferOffset, &texBuf, pushAlignment);
			}
			LoadTextureLevel(*entry, (uint8_t *)data, lstride, srcLevel, lfactor, actualFmt);
			if (plan.saveTexture)
				bufferOffset = drawEngine_->GetPushBufferForTextureData()->PushAligned(&saveData[0], sz, pushAlignment, &texBuf);
		};

		bool dataScaled = true;
		if (plan.replaceValid) {
			// Directly load the replaced image.
			data = drawEngine_->GetPushBufferForTextureData()->PushAligned(uploadSize, &bufferOffset, &texBuf, pushAlignment);
			double replaceStart = time_now_d();
			plan.replaced->Load(plan.baseLevelSrc + i, data, stride);  // if it fails, it'll just be garbage data... OK for now.
			replacementTimeThisFrame_ += time_now_d() - replaceStart;
			VK_PROFILE_BEGIN(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT,
				"Copy Upload (replaced): %dx%d", mipWidth, mipHeight);
			entry->vkTex->UploadMip(cmdInit, i, mipWidth, mipHeight, 0, texBuf, bufferOffset, stride / bpp);
			VK_PROFILE_END(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT);
		} else {
			if (plan.depth != 1) {
				// 3D texturing.
				loadLevel(uploadSize, i, stride, plan.scaleFactor);
				entry->vkTex->UploadMip(cmdInit, 0, mipWidth, mipHeight, i, texBuf, bufferOffset, stride / bpp);
			} else if (computeUpload) {
				int srcBpp = dstFmt == VULKAN_8888_FORMAT ? 4 : 2;
				int srcStride = mipUnscaledWidth * srcBpp;
				int srcSize = srcStride * mipUnscaledHeight;
				loadLevel(srcSize, i == 0 ? plan.baseLevelSrc : i, srcStride, 1);
				dataScaled = false;

				// This format can be used with storage images.
				VkImageView view = entry->vkTex->CreateViewForMip(i);
				VkDescriptorSet descSet = computeShaderManager_.GetDescriptorSet(view, texBuf, bufferOffset, srcSize);
				struct Params { int x; int y; } params{ mipUnscaledWidth, mipUnscaledHeight };
				VK_PROFILE_BEGIN(vulkan, cmdInit, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
					"Compute Upload: %dx%d->%dx%d", mipUnscaledWidth, mipUnscaledHeight, mipWidth, mipHeight);
				vkCmdBindPipeline(cmdInit, VK_PIPELINE_BIND_POINT_COMPUTE, computeShaderManager_.GetPipeline(uploadCS_));
				vkCmdBindDescriptorSets(cmdInit, VK_PIPELINE_BIND_POINT_COMPUTE, computeShaderManager_.GetPipelineLayout(), 0, 1, &descSet, 0, nullptr);
				vkCmdPushConstants(cmdInit, computeShaderManager_.GetPipelineLayout(), VK_SHADER_STAGE_COMPUTE_BIT, 0, sizeof(params), &params);
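				// One thread per input pixel; workgroups are 8x8 (the local_size in uploadShader),
				// so round the dispatch dimensions up to cover the whole source level.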
				vkCmdDispatch(cmdInit, (mipUnscaledWidth + 7) / 8, (mipUnscaledHeight + 7) / 8, 1);
				VK_PROFILE_END(vulkan, cmdInit, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
				vulkan->Delete().QueueDeleteImageView(view);
			} else {
				loadLevel(uploadSize, i == 0 ? plan.baseLevelSrc : i, stride, plan.scaleFactor);
				VK_PROFILE_BEGIN(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT,
					"Copy Upload: %dx%d", mipWidth, mipHeight);
				entry->vkTex->UploadMip(cmdInit, i, mipWidth, mipHeight, 0, texBuf, bufferOffset, stride / bpp);
				VK_PROFILE_END(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT);
			}

			// Format might be wrong in lowMemoryMode_, so don't save.
			if (replacer_.Enabled() && plan.replaced->IsInvalid() && !lowMemoryMode_) {
				// When hardware texture scaling is enabled, this saves the original.
				int w = dataScaled ? mipWidth : mipUnscaledWidth;
				int h = dataScaled ? mipHeight : mipUnscaledHeight;
				// At this point, data should be saveData, and not slow.
				ReplacedTextureDecodeInfo replacedInfo;
				replacedInfo.cachekey = entry->CacheKey();
				replacedInfo.hash = entry->fullhash;
				replacedInfo.addr = entry->addr;
				replacedInfo.isVideo = IsVideo(entry->addr);
				replacedInfo.isFinal = (entry->status & TexCacheEntry::STATUS_TO_SCALE) == 0;
				replacedInfo.scaleFactor = plan.scaleFactor;
				replacedInfo.fmt = FromVulkanFormat(actualFmt);
				replacer_.NotifyTextureDecoded(replacedInfo, data, stride, plan.baseLevelSrc + i, w, h);
			}
		}
	}

	VkImageLayout layout = computeUpload ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	VkPipelineStageFlags prevStage = computeUpload ? VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT : VK_PIPELINE_STAGE_TRANSFER_BIT;

	// Generate any additional mipmap levels.
	// This will transition the whole stack to GENERAL if it wasn't already.
	if (plan.levelsToLoad < plan.levelsToCreate) {
		VK_PROFILE_BEGIN(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT, "Mipgen up to level %d", plan.levelsToCreate);
		entry->vkTex->GenerateMips(cmdInit, plan.levelsToLoad, computeUpload);
		layout = VK_IMAGE_LAYOUT_GENERAL;
		prevStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		VK_PROFILE_END(vulkan, cmdInit, VK_PIPELINE_STAGE_TRANSFER_BIT);
	}

	entry->vkTex->EndCreate(cmdInit, false, prevStage, layout);
	VK_PROFILE_END(vulkan, cmdInit, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

	// Signal that we support depth textures so use it as one.
	if (plan.depth > 1) {
		entry->status |= TexCacheEntry::STATUS_3D;
	}

	if (plan.replaceValid) {
		entry->SetAlphaStatus(TexCacheEntry::TexStatus(plan.replaced->AlphaStatus()));
	}
}

VkFormat TextureCacheVulkan::GetDestFormat(GETextureFormat format, GEPaletteFormat clutFormat) const {
	if (!gstate_c.Supports(GPU_SUPPORTS_16BIT_FORMATS)) {
		return VK_FORMAT_R8G8B8A8_UNORM;
	}
	switch (format) {
	case GE_TFMT_CLUT4:
	case GE_TFMT_CLUT8:
	case GE_TFMT_CLUT16:
	case GE_TFMT_CLUT32:
		return getClutDestFormatVulkan(clutFormat);
	case GE_TFMT_4444:
		return VULKAN_4444_FORMAT;
	case GE_TFMT_5551:
		return VULKAN_1555_FORMAT;
	case GE_TFMT_5650:
		return VULKAN_565_FORMAT;
	case GE_TFMT_8888:
	case GE_TFMT_DXT1:
	case GE_TFMT_DXT3:
	case GE_TFMT_DXT5:
	default:
		return VULKAN_8888_FORMAT;
	}
}

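// Scans w pixels of decoded data and reports whether any of them are not fully opaque, by testing
// the alpha bits of the destination format (top nibble for 4444, top bit for 1555, top byte for
// 8888); 565 never has alpha.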
CheckAlphaResult TextureCacheVulkan::CheckAlpha(const u32 *pixelData, VkFormat dstFmt, int w) {
	switch (dstFmt) {
	case VULKAN_4444_FORMAT:
		return CheckAlpha16((const u16 *)pixelData, w, 0xF000);
	case VULKAN_1555_FORMAT:
		return CheckAlpha16((const u16 *)pixelData, w, 0x8000);
	case VULKAN_565_FORMAT:
		// Never has any alpha.
		return CHECKALPHA_FULL;
	default:
		return CheckAlpha32(pixelData, w, 0xFF000000);
	}
}

void TextureCacheVulkan::LoadTextureLevel(TexCacheEntry &entry, uint8_t *writePtr, int rowPitch, int level, int scaleFactor, VkFormat dstFmt) {
	int w = gstate.getTextureWidth(level);
	int h = gstate.getTextureHeight(level);

	GETextureFormat tfmt = (GETextureFormat)entry.format;
	GEPaletteFormat clutformat = gstate.getClutPaletteFormat();
	u32 texaddr = gstate.getTextureAddress(level);

	_assert_msg_(texaddr != 0, "Can't load a texture from address null")

	int bufw = GetTextureBufw(level, texaddr, tfmt);
	int bpp = dstFmt == VULKAN_8888_FORMAT ? 4 : 2;

	u32 *pixelData;
	int decPitch;

	bool expand32 = !gstate_c.Supports(GPU_SUPPORTS_16BIT_FORMATS) || scaleFactor > 1 || dstFmt == VULKAN_8888_FORMAT;

	if (scaleFactor > 1) {
		tmpTexBufRearrange_.resize(std::max(bufw, w) * h);
		pixelData = tmpTexBufRearrange_.data();
		// We want to end up with a neatly packed texture for scaling.
		decPitch = w * bpp;
	} else {
		pixelData = (u32 *)writePtr;
		decPitch = rowPitch;
	}

	CheckAlphaResult alphaResult = DecodeTextureLevel((u8 *)pixelData, decPitch, tfmt, clutformat, texaddr, level, bufw, false, expand32);
	entry.SetAlphaStatus(alphaResult, level);

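	// CPU scaling path: the level was decoded into tmpTexBufRearrange_ above. Scale it into a
	// temporary buffer, then copy into the destination, row by row if rowPitch differs from the
	// packed width of the scaled image.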
	if (scaleFactor > 1) {
		u32 fmt = dstFmt;
		// CPU scaling reads from the destination buffer so we want cached RAM.
		uint8_t *rearrange = (uint8_t *)AllocateAlignedMemory(w * scaleFactor * h * scaleFactor * 4, 16);
		scaler_.ScaleAlways((u32 *)rearrange, pixelData, w, h, scaleFactor);
		pixelData = (u32 *)writePtr;

		// We always end up at 8888. Other parts assume this.
		_assert_(dstFmt == VULKAN_8888_FORMAT);
		bpp = sizeof(u32);
		decPitch = w * bpp;

		if (decPitch != rowPitch) {
			for (int y = 0; y < h; ++y) {
				memcpy(writePtr + rowPitch * y, rearrange + decPitch * y, w * bpp);
			}
			decPitch = rowPitch;
		} else {
			memcpy(writePtr, rearrange, w * h * 4);
		}
		FreeAlignedMemory(rearrange);
	}
}

void TextureCacheVulkan::BoundFramebufferTexture() {
	imageView_ = (VkImageView)draw_->GetNativeObject(Draw::NativeObject::BOUND_TEXTURE0_IMAGEVIEW);
}

bool TextureCacheVulkan::GetCurrentTextureDebug(GPUDebugBuffer &buffer, int level) {
	SetTexture();
	if (!nextTexture_) {
		if (nextFramebufferTexture_) {
			VirtualFramebuffer *vfb = nextFramebufferTexture_;
			buffer.Allocate(vfb->bufferWidth, vfb->bufferHeight, GPU_DBG_FORMAT_8888, false);
			bool retval = draw_->CopyFramebufferToMemorySync(vfb->fbo, Draw::FB_COLOR_BIT, 0, 0, vfb->bufferWidth, vfb->bufferHeight, Draw::DataFormat::R8G8B8A8_UNORM, buffer.GetData(), vfb->bufferWidth, "GetCurrentTextureDebug");
			// Vulkan requires us to re-apply all dynamic state for each command buffer, and the above will cause us to start a new cmdbuf.
			// So let's dirty the things that are involved in Vulkan dynamic state. Readbacks are not frequent so this won't hurt other backends.
			gstate_c.Dirty(DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_BLEND_STATE | DIRTY_DEPTHSTENCIL_STATE);
			// We may have blitted to a temp FBO.
			framebufferManager_->RebindFramebuffer("RebindFramebuffer - GetCurrentTextureDebug");
			if (!retval)
				ERROR_LOG(G3D, "Failed to get debug texture: copy to memory failed");
			return retval;
		} else {
			return false;
		}
	}

	// Apply texture may need to rebuild the texture if we're about to render, or bind a framebuffer.
	TexCacheEntry *entry = nextTexture_;
	ApplyTexture();

	if (!entry->vkTex)
		return false;

	VulkanTexture *texture = entry->vkTex;
	VulkanRenderManager *renderManager = (VulkanRenderManager *)draw_->GetNativeObject(Draw::NativeObject::RENDER_MANAGER);

	GPUDebugBufferFormat bufferFormat;
	Draw::DataFormat drawFormat;
	switch (texture->GetFormat()) {
	case VULKAN_565_FORMAT:
		bufferFormat = GPU_DBG_FORMAT_565;
		drawFormat = Draw::DataFormat::B5G6R5_UNORM_PACK16;
		break;
	case VULKAN_1555_FORMAT:
		bufferFormat = GPU_DBG_FORMAT_5551;
		drawFormat = Draw::DataFormat::B5G5R5A1_UNORM_PACK16;
		break;
	case VULKAN_4444_FORMAT:
		bufferFormat = GPU_DBG_FORMAT_4444;
		drawFormat = Draw::DataFormat::B4G4R4A4_UNORM_PACK16;
		break;
	case VULKAN_8888_FORMAT:
	default:
		bufferFormat = GPU_DBG_FORMAT_8888;
		drawFormat = Draw::DataFormat::R8G8B8A8_UNORM;
		break;
	}

	int w = texture->GetWidth();
	int h = texture->GetHeight();
	if (level > 0) {
		// In the future, maybe this could do something for 3D textures...
		if (level >= texture->GetNumMips())
			return false;
		w >>= level;
		h >>= level;
	}

	buffer.Allocate(w, h, bufferFormat);
	renderManager->CopyImageToMemorySync(texture->GetImage(), level, 0, 0, w, h, drawFormat, (uint8_t *)buffer.GetData(), w, "GetCurrentTextureDebug");

	// Vulkan requires us to re-apply all dynamic state for each command buffer, and the above will cause us to start a new cmdbuf.
	// So let's dirty the things that are involved in Vulkan dynamic state. Readbacks are not frequent so this won't hurt other backends.
	gstate_c.Dirty(DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_BLEND_STATE | DIRTY_DEPTHSTENCIL_STATE);
	framebufferManager_->RebindFramebuffer("RebindFramebuffer - GetCurrentTextureDebug");
	return true;
}

void TextureCacheVulkan::GetStats(char *ptr, size_t size) {
	snprintf(ptr, size, "N/A");
}

std::vector<std::string> TextureCacheVulkan::DebugGetSamplerIDs() const {
	return samplerCache_.DebugGetSamplerIDs();
}

std::string TextureCacheVulkan::DebugGetSamplerString(std::string id, DebugShaderStringType stringType) {
	return samplerCache_.DebugGetSamplerString(id, stringType);
}