2015-10-10 16:41:19 +02:00
// Copyright (c) 2012- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
2023-01-18 07:28:12 -08:00
# include "ppsspp_config.h"
2020-09-15 00:34:45 +02:00
# include <algorithm>
2022-11-24 10:38:49 +01:00
# include <functional>
2020-09-15 00:34:45 +02:00
2020-10-04 00:25:21 +02:00
# include "Common/Data/Convert/SmallDataConvert.h"
2020-10-04 10:04:01 +02:00
# include "Common/Profiler/Profiler.h"
2020-10-04 23:24:14 +02:00
# include "Common/GPU/Vulkan/VulkanRenderManager.h"
2015-10-10 16:41:19 +02:00
2020-08-15 12:25:39 +02:00
# include "Common/Log.h"
2015-10-10 16:41:19 +02:00
# include "Common/MemoryUtil.h"
2020-08-15 20:53:08 +02:00
# include "Common/TimeUtil.h"
2015-10-10 16:41:19 +02:00
# include "Core/MemMap.h"
# include "Core/System.h"
# include "Core/Config.h"
# include "Core/CoreTiming.h"
# include "GPU/Math3D.h"
# include "GPU/GPUState.h"
# include "GPU/ge_constants.h"
2020-10-04 23:24:14 +02:00
# include "Common/GPU/Vulkan/VulkanContext.h"
# include "Common/GPU/Vulkan/VulkanMemory.h"
2016-03-20 09:35:10 +01:00
2015-10-10 16:41:19 +02:00
# include "GPU/Common/SplineCommon.h"
# include "GPU/Common/TransformCommon.h"
# include "GPU/Common/VertexDecoderCommon.h"
# include "GPU/Common/SoftwareTransformCommon.h"
# include "GPU/Common/DrawEngineCommon.h"
2022-10-18 00:26:10 +02:00
# include "GPU/Common/ShaderUniforms.h"
2018-09-01 08:32:03 -07:00
# include "GPU/Debugger/Debugger.h"
2015-10-10 16:41:19 +02:00
# include "GPU/Vulkan/DrawEngineVulkan.h"
# include "GPU/Vulkan/TextureCacheVulkan.h"
# include "GPU/Vulkan/ShaderManagerVulkan.h"
# include "GPU/Vulkan/PipelineManagerVulkan.h"
2020-08-03 23:22:11 +02:00
# include "GPU/Vulkan/FramebufferManagerVulkan.h"
2015-10-10 16:41:19 +02:00
# include "GPU/Vulkan/GPU_Vulkan.h"
2021-08-21 12:39:15 +02:00
using namespace PPSSPP_VK ;
2017-08-17 11:22:23 +02:00
// Size in bytes of the dedicated vertex-cache push buffer (see vertexCache_).
enum {
	VERTEX_CACHE_SIZE = 8192 * 1024
};

// Frames between attempts to prune old entries from the vertex cache (vai_).
#define VERTEXCACHE_DECIMATION_INTERVAL 17

#define DESCRIPTORSET_DECIMATION_INTERVAL 1  // Temporarily cut to 1. Handle reuse breaks this when textures get deleted.

// Eviction tuning for cached vertex arrays, measured in flips (frames):
// VAI_KILL_AGE            - evict a reliable entry not used for this many flips.
// VAI_UNRELIABLE_KILL_AGE - evict an unreliable entry not used for this many flips.
// VAI_UNRELIABLE_KILL_MAX - cap on unreliable evictions per decimation pass (limits rehashing).
enum { VAI_KILL_AGE = 120, VAI_UNRELIABLE_KILL_AGE = 240, VAI_UNRELIABLE_KILL_MAX = 4 };

// Size in bytes of the buffer used for software-transformed vertices.
enum {
	TRANSFORMED_VERTEX_BUFFER_SIZE = VERTEX_BUFFER_MAX * sizeof(TransformedVertex)
};
2021-11-14 15:25:28 -08:00
// Constructor: configures vertex decoder options and the index generator.
// Device-dependent Vulkan objects are created separately in InitDeviceObjects().
DrawEngineVulkan::DrawEngineVulkan(Draw::DrawContext *draw)
	: draw_(draw), vai_(1024) {
	decOptions_.expandAllWeightsToFloat = false;
	decOptions_.expand8BitNormalsToFloat = false;

#if PPSSPP_PLATFORM(MAC) || PPSSPP_PLATFORM(IOS)
	// These platforms need decoded vertex attributes aligned to word boundaries.
	decOptions_.alignOutputToWord = true;
#endif

	// Route generated indices into the shared decIndex buffer.
	indexGen.Setup(decIndex);
}
void DrawEngineVulkan : : InitDeviceObjects ( ) {
2016-01-06 12:52:42 +01:00
// All resources we need for PSP drawing. Usually only bindings 0 and 2-4 are populated.
2022-02-19 20:40:27 +01:00
// TODO: Make things more flexible, so we at least have specialized layouts for input attachments and tess.
// Note that it becomes a support matrix..
2022-11-06 16:56:21 +01:00
VkDescriptorSetLayoutBinding bindings [ DRAW_BINDING_COUNT ] { } ;
2016-01-03 18:31:03 +01:00
bindings [ 0 ] . descriptorCount = 1 ;
bindings [ 0 ] . descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ;
bindings [ 0 ] . stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT ;
2016-01-09 11:07:14 +01:00
bindings [ 0 ] . binding = DRAW_BINDING_TEXTURE ;
2016-01-03 18:31:03 +01:00
bindings [ 1 ] . descriptorCount = 1 ;
bindings [ 1 ] . descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ;
bindings [ 1 ] . stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT ;
2016-01-09 11:07:14 +01:00
bindings [ 1 ] . binding = DRAW_BINDING_2ND_TEXTURE ;
2016-01-03 18:31:03 +01:00
bindings [ 2 ] . descriptorCount = 1 ;
2018-04-13 12:25:57 +02:00
bindings [ 2 ] . descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ; // sampler is ignored though.
bindings [ 2 ] . stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT ;
bindings [ 2 ] . binding = DRAW_BINDING_DEPAL_TEXTURE ;
2016-01-03 18:31:03 +01:00
bindings [ 3 ] . descriptorCount = 1 ;
bindings [ 3 ] . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ;
2018-04-13 12:25:57 +02:00
bindings [ 3 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT ;
2022-10-17 08:30:27 +02:00
if ( gstate_c . Use ( GPU_USE_GS_CULLING ) )
2022-10-01 20:01:23 -07:00
bindings [ 3 ] . stageFlags | = VK_SHADER_STAGE_GEOMETRY_BIT ;
2018-04-13 12:25:57 +02:00
bindings [ 3 ] . binding = DRAW_BINDING_DYNUBO_BASE ;
2016-01-03 18:31:03 +01:00
bindings [ 4 ] . descriptorCount = 1 ;
2018-04-10 12:22:02 +02:00
bindings [ 4 ] . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ;
2016-01-03 18:31:03 +01:00
bindings [ 4 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT ;
2018-04-13 12:25:57 +02:00
bindings [ 4 ] . binding = DRAW_BINDING_DYNUBO_LIGHT ;
2018-04-10 12:22:02 +02:00
bindings [ 5 ] . descriptorCount = 1 ;
2018-04-13 12:25:57 +02:00
bindings [ 5 ] . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ;
2018-04-10 12:22:02 +02:00
bindings [ 5 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT ;
2018-04-13 12:25:57 +02:00
bindings [ 5 ] . binding = DRAW_BINDING_DYNUBO_BONE ;
// Used only for hardware tessellation.
bindings [ 6 ] . descriptorCount = 1 ;
bindings [ 6 ] . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
bindings [ 6 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT ;
bindings [ 6 ] . binding = DRAW_BINDING_TESS_STORAGE_BUF ;
2018-07-11 01:09:20 +09:00
bindings [ 7 ] . descriptorCount = 1 ;
bindings [ 7 ] . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
bindings [ 7 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT ;
bindings [ 7 ] . binding = DRAW_BINDING_TESS_STORAGE_BUF_WU ;
bindings [ 8 ] . descriptorCount = 1 ;
bindings [ 8 ] . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
bindings [ 8 ] . stageFlags = VK_SHADER_STAGE_VERTEX_BIT ;
bindings [ 8 ] . binding = DRAW_BINDING_TESS_STORAGE_BUF_WV ;
2022-12-13 15:16:46 +01:00
// Note: This binding is not included if !gstate_c.Use(GPU_USE_FRAMEBUFFER_FETCH), using bindingCount below.
2022-02-19 20:40:27 +01:00
bindings [ 9 ] . descriptorCount = 1 ;
bindings [ 9 ] . descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ;
bindings [ 9 ] . stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT ;
bindings [ 9 ] . binding = DRAW_BINDING_INPUT_ATTACHMENT ;
2016-01-03 18:31:03 +01:00
2021-11-14 15:25:28 -08:00
VulkanContext * vulkan = ( VulkanContext * ) draw_ - > GetNativeObject ( Draw : : NativeObject : : CONTEXT ) ;
VkDevice device = vulkan - > GetDevice ( ) ;
2016-01-03 18:31:03 +01:00
2018-02-24 16:55:32 +01:00
VkDescriptorSetLayoutCreateInfo dsl { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO } ;
2022-12-13 15:16:46 +01:00
dsl . bindingCount = gstate_c . Use ( GPU_USE_FRAMEBUFFER_FETCH ) ? ARRAY_SIZE ( bindings ) : ARRAY_SIZE ( bindings ) - 1 ;
2016-01-03 18:31:03 +01:00
dsl . pBindings = bindings ;
VkResult res = vkCreateDescriptorSetLayout ( device , & dsl , nullptr , & descriptorSetLayout_ ) ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( VK_SUCCESS = = res ) ;
2022-09-08 09:15:06 +02:00
vulkan - > SetDebugName ( descriptorSetLayout_ , VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT , " drawengine_d_layout " ) ;
2016-01-03 18:31:03 +01:00
2021-12-09 18:39:55 -08:00
static constexpr int DEFAULT_DESC_POOL_SIZE = 512 ;
std : : vector < VkDescriptorPoolSize > dpTypes ;
2022-10-28 10:20:47 +02:00
dpTypes . resize ( 5 ) ;
2021-12-09 18:39:55 -08:00
dpTypes [ 0 ] . descriptorCount = DEFAULT_DESC_POOL_SIZE * 3 ;
dpTypes [ 0 ] . type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ;
dpTypes [ 1 ] . descriptorCount = DEFAULT_DESC_POOL_SIZE * 3 ; // Don't use these for tess anymore, need max three per set.
dpTypes [ 1 ] . type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ;
dpTypes [ 2 ] . descriptorCount = DEFAULT_DESC_POOL_SIZE * 3 ; // TODO: Use a separate layout when no spline stuff is needed to reduce the need for these.
dpTypes [ 2 ] . type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
2022-09-10 01:11:02 +02:00
dpTypes [ 3 ] . descriptorCount = DEFAULT_DESC_POOL_SIZE ; // TODO: Use a separate layout when no spline stuff is needed to reduce the need for these.
dpTypes [ 3 ] . type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ;
2022-11-24 10:38:49 +01:00
dpTypes [ 4 ] . descriptorCount = DEFAULT_DESC_POOL_SIZE ; // For the frame global uniform buffer. Might need to allocate multiple times.
2022-10-28 10:20:47 +02:00
dpTypes [ 4 ] . type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ;
2021-12-09 18:39:55 -08:00
VkDescriptorPoolCreateInfo dp { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO } ;
// Don't want to mess around with individually freeing these.
// We zap the whole pool every few frames.
dp . flags = 0 ;
dp . maxSets = DEFAULT_DESC_POOL_SIZE ;
2016-01-03 18:31:03 +01:00
// We are going to use one-shot descriptors in the initial implementation. Might look into caching them
// if creating and updating them turns out to be expensive.
2017-08-17 17:55:21 +02:00
for ( int i = 0 ; i < VulkanContext : : MAX_INFLIGHT_FRAMES ; i + + ) {
2021-12-09 18:39:55 -08:00
frame_ [ i ] . descPool . Create ( vulkan , dp , dpTypes ) ;
2023-03-15 10:09:39 +01:00
// Note that pushUBO_ is also used for tessellation data (search for SetPushBuffer), and to upload
2019-03-13 23:31:54 +01:00
// the null texture. This should be cleaned up...
2016-01-03 18:31:03 +01:00
}
2023-03-15 10:09:39 +01:00
pushUBO_ = ( VulkanPushPool * ) draw_ - > GetNativeObject ( Draw : : NativeObject : : PUSH_POOL ) ;
pushVertex_ = new VulkanPushPool ( vulkan , " pushVertex " , 4 * 1024 * 1024 , VK_BUFFER_USAGE_VERTEX_BUFFER_BIT ) ;
pushIndex_ = new VulkanPushPool ( vulkan , " pushIndex " , 1 * 512 * 1024 , VK_BUFFER_USAGE_INDEX_BUFFER_BIT ) ;
2023-03-14 23:13:08 +01:00
2018-02-24 16:55:32 +01:00
VkPipelineLayoutCreateInfo pl { VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO } ;
2016-01-03 18:31:03 +01:00
pl . pPushConstantRanges = nullptr ;
pl . pushConstantRangeCount = 0 ;
2022-12-16 13:03:44 +01:00
VkDescriptorSetLayout layouts [ 1 ] = { descriptorSetLayout_ } ;
2022-10-18 00:26:10 +02:00
pl . setLayoutCount = ARRAY_SIZE ( layouts ) ;
pl . pSetLayouts = layouts ;
2016-01-25 00:58:47 +01:00
pl . flags = 0 ;
2022-10-28 09:23:44 +02:00
2016-01-03 18:31:03 +01:00
res = vkCreatePipelineLayout ( device , & pl , nullptr , & pipelineLayout_ ) ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( VK_SUCCESS = = res ) ;
2016-01-03 18:31:03 +01:00
2022-09-08 09:15:06 +02:00
vulkan - > SetDebugName ( pipelineLayout_ , VK_OBJECT_TYPE_PIPELINE_LAYOUT , " drawengine_p_layout " ) ;
2018-02-24 16:55:32 +01:00
VkSamplerCreateInfo samp { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO } ;
2016-01-09 01:23:32 +01:00
samp . addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE ;
samp . addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE ;
samp . addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE ;
2022-07-20 00:05:07 +02:00
samp . magFilter = VK_FILTER_LINEAR ;
samp . minFilter = VK_FILTER_LINEAR ;
2022-08-24 09:31:47 +02:00
samp . maxLod = VK_LOD_CLAMP_NONE ; // recommended by best practices, has no effect since we don't use mipmaps.
res = vkCreateSampler ( device , & samp , nullptr , & samplerSecondaryLinear_ ) ;
samp . magFilter = VK_FILTER_NEAREST ;
samp . minFilter = VK_FILTER_NEAREST ;
res = vkCreateSampler ( device , & samp , nullptr , & samplerSecondaryNearest_ ) ;
2020-09-15 23:09:58 +02:00
_dbg_assert_ ( VK_SUCCESS = = res ) ;
2016-03-20 22:46:49 +01:00
res = vkCreateSampler ( device , & samp , nullptr , & nullSampler_ ) ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( VK_SUCCESS = = res ) ;
2017-08-17 11:22:23 +02:00
2023-03-15 09:59:00 +01:00
vertexCache_ = new VulkanPushBuffer ( vulkan , " pushVertexCache " , VERTEX_CACHE_SIZE , VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT ) ;
2017-11-10 12:40:51 +01:00
2021-11-14 15:25:28 -08:00
tessDataTransferVulkan = new TessellationDataTransferVulkan ( vulkan ) ;
2018-07-11 01:09:20 +09:00
tessDataTransfer = tessDataTransferVulkan ;
2022-11-24 10:38:49 +01:00
draw_ - > SetInvalidationCallback ( std : : bind ( & DrawEngineVulkan : : Invalidate , this , std : : placeholders : : _1 ) ) ;
2016-01-03 18:31:03 +01:00
}
2015-10-10 16:41:19 +02:00
// Destructor: releases all device objects (safe to call even after DeviceLost,
// since DestroyDeviceObjects() early-outs when draw_ is null).
DrawEngineVulkan::~DrawEngineVulkan() {
	DestroyDeviceObjects();
}
// Tears down the per-frame resources; currently only the descriptor pool.
// The vulkan parameter is unused here but kept for interface symmetry.
void DrawEngineVulkan::FrameData::Destroy(VulkanContext *vulkan) {
	descPool.Destroy();
}
// Releases everything created in InitDeviceObjects(), in reverse-ish order.
// Vulkan handles are queued for deferred deletion (vulkan->Delete()) so they
// aren't destroyed while still referenced by in-flight frames.
void DrawEngineVulkan::DestroyDeviceObjects() {
	if (!draw_) {
		// We've already done this from LostDevice.
		return;
	}
	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);
	// Unregister our Invalidate callback before tearing anything down.
	draw_->SetInvalidationCallback(InvalidationCallback());

	delete tessDataTransferVulkan;
	tessDataTransfer = nullptr;
	tessDataTransferVulkan = nullptr;

	for (int i = 0; i < VulkanContext::MAX_INFLIGHT_FRAMES; i++) {
		frame_[i].Destroy(vulkan);
	}

	// pushUBO_ is owned by the thin3d layer (see InitDeviceObjects), so just drop the pointer.
	pushUBO_ = nullptr;
	if (pushVertex_) {
		pushVertex_->Destroy();
		delete pushVertex_;
		pushVertex_ = nullptr;
	}
	if (pushIndex_) {
		pushIndex_->Destroy();
		delete pushIndex_;
		pushIndex_ = nullptr;
	}

	if (samplerSecondaryNearest_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeleteSampler(samplerSecondaryNearest_);
	if (samplerSecondaryLinear_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeleteSampler(samplerSecondaryLinear_);
	if (nullSampler_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeleteSampler(nullSampler_);
	if (pipelineLayout_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeletePipelineLayout(pipelineLayout_);
	if (descriptorSetLayout_ != VK_NULL_HANDLE)
		vulkan->Delete().QueueDeleteDescriptorSetLayout(descriptorSetLayout_);
	if (vertexCache_) {
		vertexCache_->Destroy(vulkan);
		delete vertexCache_;
		vertexCache_ = nullptr;
	}

	// Need to clear this to get rid of all remaining references to the dead buffers.
	vai_.Iterate([](uint32_t hash, VertexArrayInfoVulkan *vai) {
		delete vai;
	});
	vai_.Clear();
}
// Called when the Vulkan device goes away. Destroys objects while draw_ is
// still valid, then nulls draw_ so a later DestroyDeviceObjects() early-outs.
void DrawEngineVulkan::DeviceLost() {
	DestroyDeviceObjects();
	DirtyAllUBOs();
	draw_ = nullptr;
}
2021-11-14 15:25:28 -08:00
// Called when a new Vulkan device/context becomes available after DeviceLost.
// Stores the new draw context and recreates all device objects.
void DrawEngineVulkan::DeviceRestore(Draw::DrawContext *draw) {
	draw_ = draw;
	InitDeviceObjects();
}
2016-01-09 23:27:53 +01:00
// Per-frame setup: begins the push pools, wipes the vertex cache if it has
// grown too large, periodically resets the descriptor pool, and decimates
// old cached vertex arrays.
void DrawEngineVulkan::BeginFrame() {
	gpuStats.numTrackedVertexArrays = (int)vai_.size();

	lastPipeline_ = nullptr;

	// pushUBO is the thin3d push pool, don't need to BeginFrame again.
	pushVertex_->BeginFrame();
	pushIndex_->BeginFrame();

	tessDataTransferVulkan->SetPushPool(pushUBO_);

	DirtyAllUBOs();

	FrameData *frame = &GetCurFrame();

	// First reset all buffers, then begin. This is so that Reset can free memory and Begin can allocate it,
	// if growing the buffer is needed. Doing it this way will reduce fragmentation if more than one buffer
	// needs to grow in the same frame. The state where many buffers are reset can also be used to
	// defragment memory.
	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);

	// Wipe the vertex cache if it's grown too large.
	if (vertexCache_->GetTotalSize() > VERTEX_CACHE_SIZE) {
		vertexCache_->Destroy(vulkan);
		delete vertexCache_;  // orphans the buffers, they'll get deleted once no longer used by an in-flight frame.
		vertexCache_ = new VulkanPushBuffer(vulkan, "vertexCacheR", VERTEX_CACHE_SIZE, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
		// All cached vertex arrays pointed into the old buffer, so drop them all.
		vai_.Iterate([&](uint32_t hash, VertexArrayInfoVulkan *vai) {
			delete vai;
		});
		vai_.Clear();
	}
	vertexCache_->BeginNoReset();

	// Periodically reset the per-frame descriptor pool (interval currently 1, see top of file).
	if (--descDecimationCounter_ <= 0) {
		frame->descPool.Reset();
		descDecimationCounter_ = DESCRIPTORSET_DECIMATION_INTERVAL;
	}

	// Periodically evict stale entries from the vertex cache.
	if (--decimationCounter_ <= 0) {
		decimationCounter_ = VERTEXCACHE_DECIMATION_INTERVAL;
		const int threshold = gpuStats.numFlips - VAI_KILL_AGE;
		const int unreliableThreshold = gpuStats.numFlips - VAI_UNRELIABLE_KILL_AGE;
		int unreliableLeft = VAI_UNRELIABLE_KILL_MAX;
		vai_.Iterate([&](uint32_t hash, VertexArrayInfoVulkan *vai) {
			bool kill;
			if (vai->status == VertexArrayInfoVulkan::VAI_UNRELIABLE) {
				// We limit killing unreliable so we don't rehash too often.
				kill = vai->lastFrame < unreliableThreshold && --unreliableLeft >= 0;
			} else {
				kill = vai->lastFrame < threshold;
			}
			if (kill) {
				// This is actually quite safe.
				vai_.Remove(hash);
				delete vai;
			}
		});
	}
	vai_.Maintain();
}
void DrawEngineVulkan : : EndFrame ( ) {
2021-11-14 15:25:28 -08:00
FrameData * frame = & GetCurFrame ( ) ;
2023-03-15 10:19:00 +01:00
stats_ . pushVertexSpaceUsed = ( int ) pushVertex_ - > GetUsedThisFrame ( ) ;
stats_ . pushIndexSpaceUsed = ( int ) pushIndex_ - > GetUsedThisFrame ( ) ;
2017-08-17 11:22:23 +02:00
vertexCache_ - > End ( ) ;
2016-01-09 23:27:53 +01:00
}
2017-11-19 12:33:20 +01:00
void DrawEngineVulkan : : DecodeVertsToPushBuffer ( VulkanPushBuffer * push , uint32_t * bindOffset , VkBuffer * vkbuf ) {
2016-03-20 16:06:11 +01:00
u8 * dest = decoded ;
// Figure out how much pushbuffer space we need to allocate.
if ( push ) {
2017-08-17 11:22:23 +02:00
int vertsToDecode = ComputeNumVertsToDecode ( ) ;
2023-03-15 09:56:32 +01:00
dest = ( u8 * ) push - > Allocate ( vertsToDecode * dec_ - > GetDecVtxFmt ( ) . stride , 4 , vkbuf , bindOffset ) ;
2015-10-10 16:41:19 +02:00
}
2017-11-19 12:33:20 +01:00
DecodeVerts ( dest ) ;
}
2015-10-10 16:41:19 +02:00
2023-03-14 23:13:08 +01:00
void DrawEngineVulkan : : DecodeVertsToPushPool ( VulkanPushPool * push , uint32_t * bindOffset , VkBuffer * vkbuf ) {
u8 * dest = decoded ;
// Figure out how much pushbuffer space we need to allocate.
if ( push ) {
int vertsToDecode = ComputeNumVertsToDecode ( ) ;
dest = push - > Allocate ( vertsToDecode * dec_ - > GetDecVtxFmt ( ) . stride , 4 , vkbuf , bindOffset ) ;
}
DecodeVerts ( dest ) ;
}
2018-04-10 12:22:02 +02:00
// Returns a descriptor set matching the given textures/buffers, reusing a
// cached one from the current frame when possible (except for HW tessellation,
// which is never cached). Allocates from the per-frame pool and fills in only
// the bindings that are actually in use.
VkDescriptorSet DrawEngineVulkan::GetOrCreateDescriptorSet(VkImageView imageView, VkSampler sampler, VkBuffer base, VkBuffer light, VkBuffer bone, bool tess) {
	_dbg_assert_(base != VK_NULL_HANDLE);
	_dbg_assert_(light != VK_NULL_HANDLE);
	_dbg_assert_(bone != VK_NULL_HANDLE);

	// The key covers every input that affects the descriptor set contents,
	// including the implicitly-bound secondary/depal views.
	DescriptorSetKey key{};
	key.imageView_ = imageView;
	key.sampler_ = sampler;
	key.secondaryImageView_ = boundSecondary_;
	key.depalImageView_ = boundDepal_;
	key.base_ = base;
	key.light_ = light;
	key.bone_ = bone;
	key.secondaryIsInputAttachment = boundSecondaryIsInputAttachment_;

	FrameData &frame = GetCurFrame();
	// See if we already have this descriptor set cached.
	if (!tess) { // Don't cache descriptors for HW tessellation.
		VkDescriptorSet d = frame.descSets.Get(key);
		if (d != VK_NULL_HANDLE)
			return d;
	}

	// Didn't find one in the frame descriptor set cache, let's make a new one.
	// We wipe the cache on every frame.
	VkDescriptorSet desc = frame.descPool.Allocate(1, &descriptorSetLayout_, "game_descset");

	// Even in release mode, this is bad.
	_assert_msg_(desc != VK_NULL_HANDLE, "Ran out of descriptor space in pool. sz=%d", (int)frame.descSets.size());

	// We just don't write to the slots we don't care about, which is fine.
	VkWriteDescriptorSet writes[DRAW_BINDING_COUNT]{};
	// Main texture
	int n = 0;
	VkDescriptorImageInfo tex[3]{};
	if (imageView) {
		_dbg_assert_(sampler != VK_NULL_HANDLE);
		tex[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		tex[0].imageView = imageView;
		tex[0].sampler = sampler;
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_TEXTURE;
		writes[n].pImageInfo = &tex[0];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		writes[n].dstSet = desc;
		n++;
	}

	// Secondary texture, bound either as a sampled image or as an input
	// attachment depending on how it was registered.
	if (boundSecondary_) {
		tex[1].imageLayout = key.secondaryIsInputAttachment ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		tex[1].imageView = boundSecondary_;
		tex[1].sampler = samplerSecondaryNearest_;
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = key.secondaryIsInputAttachment ? DRAW_BINDING_INPUT_ATTACHMENT : DRAW_BINDING_2ND_TEXTURE;
		writes[n].pImageInfo = &tex[1];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = key.secondaryIsInputAttachment ? VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT : VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		writes[n].dstSet = desc;
		n++;
	}

	// Depal (CLUT) texture; sampler choice depends on the smoothing setting.
	if (boundDepal_) {
		tex[2].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		tex[2].imageView = boundDepal_;
		tex[2].sampler = boundDepalSmoothed_ ? samplerSecondaryLinear_ : samplerSecondaryNearest_;
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_DEPAL_TEXTURE;
		writes[n].pImageInfo = &tex[2];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		writes[n].dstSet = desc;
		n++;
	}

	// Tessellation data buffer.
	if (tess) {
		const VkDescriptorBufferInfo *bufInfo = tessDataTransferVulkan->GetBufferInfo();
		// Control Points
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_TESS_STORAGE_BUF;
		writes[n].pBufferInfo = &bufInfo[0];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
		writes[n].dstSet = desc;
		n++;
		// Weights U
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_TESS_STORAGE_BUF_WU;
		writes[n].pBufferInfo = &bufInfo[1];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
		writes[n].dstSet = desc;
		n++;
		// Weights V
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_TESS_STORAGE_BUF_WV;
		writes[n].pBufferInfo = &bufInfo[2];
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
		writes[n].dstSet = desc;
		n++;
	}

	// Uniform buffer objects
	VkDescriptorBufferInfo buf[3]{};
	int count = 0;
	buf[count].buffer = base;
	buf[count].offset = 0;
	buf[count].range = sizeof(UB_VS_FS_Base);
	count++;
	buf[count].buffer = light;
	buf[count].offset = 0;
	buf[count].range = sizeof(UB_VS_Lights);
	count++;
	buf[count].buffer = bone;
	buf[count].offset = 0;
	buf[count].range = sizeof(UB_VS_Bones);
	count++;
	// The three dynamic UBO bindings are consecutive starting at DRAW_BINDING_DYNUBO_BASE.
	for (int i = 0; i < count; i++) {
		writes[n].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		writes[n].pNext = nullptr;
		writes[n].dstBinding = DRAW_BINDING_DYNUBO_BASE + i;
		writes[n].dstArrayElement = 0;
		writes[n].pBufferInfo = &buf[i];
		writes[n].dstSet = desc;
		writes[n].descriptorCount = 1;
		writes[n].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
		n++;
	}

	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);
	vkUpdateDescriptorSets(vulkan->GetDevice(), n, writes, 0, nullptr);

	if (!tess) // Again, avoid caching when HW tessellation.
		frame.descSets.Insert(key, desc);
	return desc;
}
2016-03-20 16:33:34 +01:00
// Invalidates all cached UBO bindings and the bound texture so the next draw
// re-uploads uniforms and re-applies the texture.
void DrawEngineVulkan::DirtyAllUBOs() {
	baseUBOOffset = 0;
	lightUBOOffset = 0;
	boneUBOOffset = 0;
	baseBuf = VK_NULL_HANDLE;
	lightBuf = VK_NULL_HANDLE;
	boneBuf = VK_NULL_HANDLE;
	dirtyUniforms_ = DIRTY_BASE_UNIFORMS | DIRTY_LIGHT_UNIFORMS | DIRTY_BONE_UNIFORMS;
	imageView = VK_NULL_HANDLE;
	sampler = VK_NULL_HANDLE;
	gstate_c.Dirty(DIRTY_TEXTURE_IMAGE);
}
2017-08-17 11:22:23 +02:00
// Flags a cached vertex array as unreliable, which makes BeginFrame() evict it
// more aggressively and limits how often it gets rehashed.
void MarkUnreliable(VertexArrayInfoVulkan *vai) {
	vai->status = VertexArrayInfoVulkan::VAI_UNRELIABLE;
	// TODO: If we change to a real allocator, free the data here.
	// For now we just leave it in the pushbuffer.
}
2022-12-01 19:15:38 +01:00
void DrawEngineVulkan : : Invalidate ( InvalidationCallbackFlags flags ) {
if ( flags & InvalidationCallbackFlags : : COMMAND_BUFFER_STATE ) {
2022-12-16 13:03:44 +01:00
// Nothing here anymore (removed the "frame descriptor set"
// If we add back "seldomly-changing" descriptors, we might use this again.
2022-11-24 10:38:49 +01:00
}
2022-12-01 19:15:38 +01:00
if ( flags & InvalidationCallbackFlags : : RENDER_PASS_STATE ) {
2022-11-24 10:38:49 +01:00
// If have a new render pass, dirty our dynamic state so it gets re-set.
//
// Dirty everything that has dynamic state that will need re-recording.
gstate_c . Dirty ( DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_DEPTHSTENCIL_STATE | DIRTY_BLEND_STATE | DIRTY_TEXTURE_IMAGE | DIRTY_TEXTURE_PARAMS ) ;
lastPipeline_ = nullptr ;
}
}
2017-08-18 13:39:42 +02:00
// The inline wrapper in the header checks for numDrawCalls == 0
2017-05-19 17:21:08 +02:00
void DrawEngineVulkan : : DoFlush ( ) {
2022-08-05 21:11:33 +02:00
VulkanRenderManager * renderManager = ( VulkanRenderManager * ) draw_ - > GetNativeObject ( Draw : : NativeObject : : RENDER_MANAGER ) ;
2017-08-18 13:39:42 +02:00
PROFILE_THIS_SCOPE ( " Flush " ) ;
2022-08-05 21:11:33 +02:00
FrameData & frameData = GetCurFrame ( ) ;
2017-05-24 00:45:15 +02:00
2020-12-13 16:04:16 +01:00
bool tess = gstate_c . submitType = = SubmitType : : HW_BEZIER | | gstate_c . submitType = = SubmitType : : HW_SPLINE ;
2017-11-13 10:29:01 +01:00
2016-05-01 16:25:09 -07:00
bool textureNeedsApply = false ;
2017-01-24 09:41:38 +01:00
if ( gstate_c . IsDirty ( DIRTY_TEXTURE_IMAGE | DIRTY_TEXTURE_PARAMS ) & & ! gstate . isModeClear ( ) & & gstate . isTextureMapEnabled ( ) ) {
2016-05-01 17:27:14 -07:00
textureCache_ - > SetTexture ( ) ;
2017-01-24 09:41:38 +01:00
gstate_c . Clean ( DIRTY_TEXTURE_IMAGE | DIRTY_TEXTURE_PARAMS ) ;
2016-05-01 16:25:09 -07:00
textureNeedsApply = true ;
2022-10-02 20:44:35 -07:00
} else if ( gstate . getTextureAddress ( 0 ) = = ( gstate . getFrameBufRawAddress ( ) | 0x04000000 ) ) {
2018-04-22 11:16:33 -07:00
// This catches the case of clearing a texture.
gstate_c . Dirty ( DIRTY_TEXTURE_IMAGE ) ;
2016-01-09 21:19:18 +01:00
}
2015-10-10 16:41:19 +02:00
GEPrimitiveType prim = prevPrim_ ;
2016-01-09 11:07:14 +01:00
2018-04-28 16:38:29 -07:00
// Always use software for flat shading to fix the provoking index.
2018-06-28 19:30:20 -07:00
bool useHWTransform = CanUseHardwareTransform ( prim ) & & ( tess | | gstate . getShadeMode ( ) ! = GE_SHADE_FLAT ) ;
2015-10-10 16:41:19 +02:00
2017-08-17 22:51:20 +02:00
uint32_t ibOffset ;
uint32_t vbOffset ;
2016-01-03 18:31:03 +01:00
2023-01-05 15:37:42 +01:00
// The optimization to avoid indexing isn't really worth it on Vulkan since it means creating more pipelines.
// This could be avoided with the new dynamic state extensions, but not available enough on mobile.
2023-01-11 22:59:30 +01:00
const bool forceIndexed = draw_ - > GetDeviceCaps ( ) . verySlowShaderCompiler ;
2023-01-05 15:37:42 +01:00
2016-01-09 11:07:14 +01:00
if ( useHWTransform ) {
2015-10-10 16:41:19 +02:00
int vertexCount = 0 ;
bool useElements = true ;
2017-06-02 11:47:14 +02:00
// Cannot cache vertex data with morph enabled.
bool useCache = g_Config . bVertexCache & & ! ( lastVType_ & GE_VTYPE_MORPHCOUNT_MASK ) ;
2018-04-10 11:50:24 +02:00
// Also avoid caching when software skinning.
2017-08-17 11:22:23 +02:00
VkBuffer vbuf = VK_NULL_HANDLE ;
VkBuffer ibuf = VK_NULL_HANDLE ;
2022-11-06 08:40:54 -08:00
if ( decOptions_ . applySkinInDecode & & ( lastVType_ & GE_VTYPE_WEIGHT_MASK ) ) {
2017-08-17 11:22:23 +02:00
useCache = false ;
2017-06-02 11:47:14 +02:00
}
if ( useCache ) {
2017-08-18 13:39:42 +02:00
PROFILE_THIS_SCOPE ( " vcache " ) ;
2017-06-02 11:47:14 +02:00
u32 id = dcid_ ^ gstate . getUVGenMode ( ) ; // This can have an effect on which UV decoder we need to use! And hence what the decoded data will look like. See #9263
2017-08-20 11:30:19 +02:00
VertexArrayInfoVulkan * vai = vai_ . Get ( id ) ;
if ( ! vai ) {
2017-08-17 11:22:23 +02:00
vai = new VertexArrayInfoVulkan ( ) ;
2017-08-20 11:30:19 +02:00
vai_ . Insert ( id , vai ) ;
2017-08-17 11:22:23 +02:00
}
switch ( vai - > status ) {
case VertexArrayInfoVulkan : : VAI_NEW :
{
// Haven't seen this one before. We don't actually upload the vertex data yet.
2020-08-27 20:37:49 -07:00
uint64_t dataHash = ComputeHash ( ) ;
2017-08-17 11:22:23 +02:00
vai - > hash = dataHash ;
vai - > minihash = ComputeMiniHash ( ) ;
vai - > status = VertexArrayInfoVulkan : : VAI_HASHING ;
vai - > drawsUntilNextFullHash = 0 ;
2023-03-15 10:09:39 +01:00
DecodeVertsToPushPool ( pushVertex_ , & vbOffset , & vbuf ) ; // writes to indexGen
2017-08-17 11:22:23 +02:00
vai - > numVerts = indexGen . VertexCount ( ) ;
vai - > prim = indexGen . Prim ( ) ;
vai - > maxIndex = indexGen . MaxIndex ( ) ;
vai - > flags = gstate_c . vertexFullAlpha ? VAIVULKAN_FLAG_VERTEXFULLALPHA : 0 ;
goto rotateVBO ;
}
// Hashing - still gaining confidence about the buffer.
// But if we get this far it's likely to be worth uploading the data.
case VertexArrayInfoVulkan : : VAI_HASHING :
{
2017-08-18 13:39:42 +02:00
PROFILE_THIS_SCOPE ( " vcachehash " ) ;
2017-08-17 11:22:23 +02:00
vai - > numDraws + + ;
if ( vai - > lastFrame ! = gpuStats . numFlips ) {
vai - > numFrames + + ;
}
if ( vai - > drawsUntilNextFullHash = = 0 ) {
// Let's try to skip a full hash if mini would fail.
const u32 newMiniHash = ComputeMiniHash ( ) ;
2020-08-27 20:37:49 -07:00
uint64_t newHash = vai - > hash ;
2017-08-17 11:22:23 +02:00
if ( newMiniHash = = vai - > minihash ) {
newHash = ComputeHash ( ) ;
}
if ( newMiniHash ! = vai - > minihash | | newHash ! = vai - > hash ) {
MarkUnreliable ( vai ) ;
2023-03-15 10:09:39 +01:00
DecodeVertsToPushPool ( pushVertex_ , & vbOffset , & vbuf ) ;
2017-08-17 11:22:23 +02:00
goto rotateVBO ;
}
if ( vai - > numVerts > 64 ) {
// exponential backoff up to 16 draws, then every 24
vai - > drawsUntilNextFullHash = std : : min ( 24 , vai - > numFrames ) ;
} else {
// Lower numbers seem much more likely to change.
vai - > drawsUntilNextFullHash = 0 ;
}
// TODO: tweak
//if (vai->numFrames > 1000) {
// vai->status = VertexArrayInfo::VAI_RELIABLE;
//}
} else {
vai - > drawsUntilNextFullHash - - ;
u32 newMiniHash = ComputeMiniHash ( ) ;
if ( newMiniHash ! = vai - > minihash ) {
MarkUnreliable ( vai ) ;
2023-03-15 10:09:39 +01:00
DecodeVertsToPushPool ( pushVertex_ , & vbOffset , & vbuf ) ;
2017-08-17 11:22:23 +02:00
goto rotateVBO ;
}
}
if ( ! vai - > vb ) {
// Directly push to the vertex cache.
2017-11-19 12:33:20 +01:00
DecodeVertsToPushBuffer ( vertexCache_ , & vai - > vbOffset , & vai - > vb ) ;
2020-07-19 17:47:02 +02:00
_dbg_assert_msg_ ( gstate_c . vertBounds . minV > = gstate_c . vertBounds . maxV , " Should not have checked UVs when caching. " ) ;
2017-08-17 11:22:23 +02:00
vai - > numVerts = indexGen . VertexCount ( ) ;
vai - > maxIndex = indexGen . MaxIndex ( ) ;
vai - > flags = gstate_c . vertexFullAlpha ? VAIVULKAN_FLAG_VERTEXFULLALPHA : 0 ;
2023-01-05 15:37:42 +01:00
if ( forceIndexed ) {
vai - > prim = indexGen . GeneralPrim ( ) ;
useElements = true ;
} else {
vai - > prim = indexGen . Prim ( ) ;
useElements = ! indexGen . SeenOnlyPurePrims ( ) ;
if ( ! useElements & & indexGen . PureCount ( ) ) {
vai - > numVerts = indexGen . PureCount ( ) ;
}
2017-08-17 12:00:10 +02:00
}
2023-01-05 15:37:42 +01:00
2017-08-17 12:00:10 +02:00
if ( useElements ) {
u32 size = sizeof ( uint16_t ) * indexGen . VertexCount ( ) ;
2023-03-15 09:56:32 +01:00
void * dest = vertexCache_ - > Allocate ( size , 4 , & vai - > ib , & vai - > ibOffset ) ;
2017-08-17 12:00:10 +02:00
memcpy ( dest , decIndex , size ) ;
} else {
vai - > ib = VK_NULL_HANDLE ;
vai - > ibOffset = 0 ;
}
2017-08-17 11:22:23 +02:00
} else {
gpuStats . numCachedDrawCalls + + ;
useElements = vai - > ib ? true : false ;
gpuStats . numCachedVertsDrawn + = vai - > numVerts ;
gstate_c . vertexFullAlpha = vai - > flags & VAIVULKAN_FLAG_VERTEXFULLALPHA ;
}
vbuf = vai - > vb ;
ibuf = vai - > ib ;
vbOffset = vai - > vbOffset ;
ibOffset = vai - > ibOffset ;
vertexCount = vai - > numVerts ;
prim = static_cast < GEPrimitiveType > ( vai - > prim ) ;
break ;
}
// Reliable - we don't even bother hashing anymore. Right now we don't go here until after a very long time.
case VertexArrayInfoVulkan : : VAI_RELIABLE :
{
vai - > numDraws + + ;
if ( vai - > lastFrame ! = gpuStats . numFlips ) {
vai - > numFrames + + ;
}
gpuStats . numCachedDrawCalls + + ;
gpuStats . numCachedVertsDrawn + = vai - > numVerts ;
vbuf = vai - > vb ;
ibuf = vai - > ib ;
vbOffset = vai - > vbOffset ;
ibOffset = vai - > ibOffset ;
vertexCount = vai - > numVerts ;
prim = static_cast < GEPrimitiveType > ( vai - > prim ) ;
gstate_c . vertexFullAlpha = vai - > flags & VAIVULKAN_FLAG_VERTEXFULLALPHA ;
break ;
}
case VertexArrayInfoVulkan : : VAI_UNRELIABLE :
{
vai - > numDraws + + ;
if ( vai - > lastFrame ! = gpuStats . numFlips ) {
vai - > numFrames + + ;
}
2023-03-15 10:09:39 +01:00
DecodeVertsToPushPool ( pushVertex_ , & vbOffset , & vbuf ) ;
2017-08-17 11:22:23 +02:00
goto rotateVBO ;
}
default :
break ;
}
} else {
2022-11-06 08:40:54 -08:00
if ( decOptions_ . applySkinInDecode & & ( lastVType_ & GE_VTYPE_WEIGHT_MASK ) ) {
2018-04-10 11:50:24 +02:00
// If software skinning, we've already predecoded into "decoded". So push that content.
2017-08-17 11:22:23 +02:00
VkDeviceSize size = decodedVerts_ * dec_ - > GetDecVtxFmt ( ) . stride ;
2023-03-15 10:09:39 +01:00
u8 * dest = ( u8 * ) pushVertex_ - > Allocate ( size , 4 , & vbuf , & vbOffset ) ;
2017-08-17 11:22:23 +02:00
memcpy ( dest , decoded , size ) ;
} else {
// Decode directly into the pushbuffer
2023-03-15 10:09:39 +01:00
DecodeVertsToPushPool ( pushVertex_ , & vbOffset , & vbuf ) ;
2017-08-17 11:22:23 +02:00
}
2017-06-02 11:47:14 +02:00
2017-08-17 11:22:23 +02:00
rotateVBO :
gpuStats . numUncachedVertsDrawn + = indexGen . VertexCount ( ) ;
2023-01-05 15:37:42 +01:00
2017-08-17 11:22:23 +02:00
vertexCount = indexGen . VertexCount ( ) ;
2023-01-05 15:37:42 +01:00
if ( forceIndexed ) {
useElements = true ;
prim = indexGen . GeneralPrim ( ) ;
} else {
useElements = ! indexGen . SeenOnlyPurePrims ( ) ;
if ( ! useElements & & indexGen . PureCount ( ) ) {
vertexCount = indexGen . PureCount ( ) ;
}
prim = indexGen . Prim ( ) ;
2017-08-17 11:22:23 +02:00
}
2015-10-10 16:41:19 +02:00
}
2017-05-21 23:13:53 +02:00
bool hasColor = ( lastVType_ & GE_VTYPE_COL_MASK ) ! = GE_VTYPE_COL_NONE ;
2015-10-10 16:41:19 +02:00
if ( gstate . isModeThrough ( ) ) {
gstate_c . vertexFullAlpha = gstate_c . vertexFullAlpha & & ( hasColor | | gstate . getMaterialAmbientA ( ) = = 255 ) ;
} else {
gstate_c . vertexFullAlpha = gstate_c . vertexFullAlpha & & ( ( hasColor & & ( gstate . materialupdate & 1 ) ) | | gstate . getMaterialAmbientA ( ) = = 255 ) & & ( ! gstate . isLightingEnabled ( ) | | gstate . getAmbientA ( ) = = 255 ) ;
}
2016-05-01 16:25:09 -07:00
if ( textureNeedsApply ) {
2017-02-19 23:19:55 +01:00
textureCache_ - > ApplyTexture ( ) ;
textureCache_ - > GetVulkanHandles ( imageView , sampler ) ;
2016-05-01 16:25:09 -07:00
if ( imageView = = VK_NULL_HANDLE )
2022-10-28 18:40:55 +02:00
imageView = ( VkImageView ) draw_ - > GetNativeObject ( gstate_c . arrayTexture ? Draw : : NativeObject : : NULL_IMAGEVIEW_ARRAY : Draw : : NativeObject : : NULL_IMAGEVIEW ) ;
2016-05-01 16:25:09 -07:00
if ( sampler = = VK_NULL_HANDLE )
sampler = nullSampler_ ;
}
2022-10-01 19:22:16 -07:00
if ( ! lastPipeline_ | | gstate_c . IsDirty ( DIRTY_BLEND_STATE | DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_RASTER_STATE | DIRTY_DEPTHSTENCIL_STATE | DIRTY_VERTEXSHADER_STATE | DIRTY_FRAGMENTSHADER_STATE | DIRTY_GEOMETRYSHADER_STATE ) | | prim ! = lastPrim_ ) {
2017-08-15 16:01:50 +02:00
if ( prim ! = lastPrim_ | | gstate_c . IsDirty ( DIRTY_BLEND_STATE | DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_RASTER_STATE | DIRTY_DEPTHSTENCIL_STATE ) ) {
ConvertStateToVulkanKey ( * framebufferManager_ , shaderManager_ , prim , pipelineKey_ , dynState_ ) ;
}
2020-03-29 14:51:54 +02:00
2022-10-23 11:21:35 +02:00
VulkanVertexShader * vshader = nullptr ;
VulkanFragmentShader * fshader = nullptr ;
VulkanGeometryShader * gshader = nullptr ;
2022-12-14 22:48:17 +01:00
shaderManager_ - > GetShaders ( prim , dec_ , & vshader , & fshader , & gshader , pipelineState_ , true , useHWTessellation_ , decOptions_ . expandAllWeightsToFloat , decOptions_ . applySkinInDecode ) ;
2021-09-03 00:06:49 +02:00
if ( ! vshader ) {
// We're screwed.
return ;
}
2020-07-19 17:47:02 +02:00
_dbg_assert_msg_ ( vshader - > UseHWTransform ( ) , " Bad vshader " ) ;
2020-03-29 14:51:54 +02:00
2023-01-13 10:14:29 +01:00
VulkanPipeline * pipeline = pipelineManager_ - > GetOrCreatePipeline ( renderManager , pipelineLayout_ , pipelineKey_ , & dec_ - > decFmt , vshader , fshader , gshader , true , 0 , framebufferManager_ - > GetMSAALevel ( ) , false ) ;
2018-03-19 17:46:58 +01:00
if ( ! pipeline | | ! pipeline - > pipeline ) {
2017-08-15 16:01:50 +02:00
// Already logged, let's bail out.
return ;
}
2017-11-15 20:43:29 +01:00
BindShaderBlendTex ( ) ; // This might cause copies so important to do before BindPipeline.
2020-06-02 09:50:20 +02:00
2022-02-19 20:40:27 +01:00
renderManager - > BindPipeline ( pipeline - > pipeline , pipeline - > pipelineFlags , pipelineLayout_ ) ;
2017-08-15 16:01:50 +02:00
if ( pipeline ! = lastPipeline_ ) {
2018-03-16 17:38:02 +01:00
if ( lastPipeline_ & & ! ( lastPipeline_ - > UsesBlendConstant ( ) & & pipeline - > UsesBlendConstant ( ) ) ) {
2017-10-20 18:09:05 +02:00
gstate_c . Dirty ( DIRTY_BLEND_STATE ) ;
}
2017-10-22 10:07:35 +02:00
lastPipeline_ = pipeline ;
2017-08-15 16:01:50 +02:00
}
2018-03-16 17:38:02 +01:00
ApplyDrawStateLate ( renderManager , false , 0 , pipeline - > UsesBlendConstant ( ) ) ;
2017-08-15 16:01:50 +02:00
gstate_c . Clean ( DIRTY_BLEND_STATE | DIRTY_DEPTHSTENCIL_STATE | DIRTY_RASTER_STATE | DIRTY_VIEWPORTSCISSOR_STATE ) ;
2022-08-28 08:34:48 -07:00
gstate_c . Dirty ( dirtyRequiresRecheck_ ) ;
dirtyRequiresRecheck_ = 0 ;
2017-08-22 13:25:45 +02:00
lastPipeline_ = pipeline ;
2017-08-15 16:01:50 +02:00
}
lastPrim_ = prim ;
2016-03-20 16:33:34 +01:00
2020-04-04 11:03:07 -07:00
dirtyUniforms_ | = shaderManager_ - > UpdateUniforms ( framebufferManager_ - > UseBufferedRendering ( ) ) ;
2022-08-05 21:11:33 +02:00
UpdateUBOs ( & frameData ) ;
2016-01-09 01:23:32 +01:00
2018-04-10 12:22:02 +02:00
VkDescriptorSet ds = GetOrCreateDescriptorSet ( imageView , sampler , baseBuf , lightBuf , boneBuf , tess ) ;
2016-03-20 09:52:13 +01:00
2018-04-10 12:22:02 +02:00
const uint32_t dynamicUBOOffsets [ 3 ] = {
baseUBOOffset , lightUBOOffset , boneUBOOffset ,
2016-01-03 18:31:03 +01:00
} ;
2015-10-10 16:41:19 +02:00
if ( useElements ) {
2020-12-13 00:20:47 +01:00
if ( ! ibuf ) {
2023-03-15 10:09:39 +01:00
ibOffset = ( uint32_t ) pushIndex_ - > Push ( decIndex , sizeof ( uint16_t ) * indexGen . VertexCount ( ) , 4 , & ibuf ) ;
2020-12-13 00:20:47 +01:00
}
2022-09-01 14:21:34 +02:00
renderManager - > DrawIndexed ( ds , ARRAY_SIZE ( dynamicUBOOffsets ) , dynamicUBOOffsets , vbuf , vbOffset , ibuf , ibOffset , vertexCount , 1 , VK_INDEX_TYPE_UINT16 ) ;
2015-10-10 16:41:19 +02:00
} else {
2022-09-01 14:21:34 +02:00
renderManager - > Draw ( ds , ARRAY_SIZE ( dynamicUBOOffsets ) , dynamicUBOOffsets , vbuf , vbOffset , vertexCount ) ;
2015-10-10 16:41:19 +02:00
}
} else {
2017-08-18 13:39:42 +02:00
PROFILE_THIS_SCOPE ( " soft " ) ;
2022-11-09 07:07:39 -08:00
if ( ! decOptions_ . applySkinInDecode ) {
decOptions_ . applySkinInDecode = true ;
lastVType_ | = ( 1 < < 26 ) ;
dec_ = GetVertexDecoder ( lastVType_ ) ;
}
2022-08-05 21:11:33 +02:00
DecodeVerts ( decoded ) ;
2017-05-21 23:13:53 +02:00
bool hasColor = ( lastVType_ & GE_VTYPE_COL_MASK ) ! = GE_VTYPE_COL_NONE ;
2015-10-10 16:41:19 +02:00
if ( gstate . isModeThrough ( ) ) {
gstate_c . vertexFullAlpha = gstate_c . vertexFullAlpha & & ( hasColor | | gstate . getMaterialAmbientA ( ) = = 255 ) ;
} else {
gstate_c . vertexFullAlpha = gstate_c . vertexFullAlpha & & ( ( hasColor & & ( gstate . materialupdate & 1 ) ) | | gstate . getMaterialAmbientA ( ) = = 255 ) & & ( ! gstate . isLightingEnabled ( ) | | gstate . getAmbientA ( ) = = 255 ) ;
}
gpuStats . numUncachedVertsDrawn + = indexGen . VertexCount ( ) ;
prim = indexGen . Prim ( ) ;
// Undo the strip optimization, not supported by the SW code yet.
if ( prim = = GE_PRIM_TRIANGLE_STRIP )
prim = GE_PRIM_TRIANGLES ;
2023-05-02 13:09:47 +02:00
u16 * const inds = decIndex ;
2017-12-02 09:58:13 +01:00
SoftwareTransformResult result { } ;
SoftwareTransformParams params { } ;
2016-03-12 13:37:08 -08:00
params . decoded = decoded ;
params . transformed = transformed ;
params . transformedExpanded = transformedExpanded ;
params . fbman = framebufferManager_ ;
params . texCache = textureCache_ ;
2022-08-05 21:11:33 +02:00
// In Vulkan, we have to force drawing of primitives if !framebufferManager_->UseBufferedRendering() because Vulkan clears
2017-12-02 09:58:13 +01:00
// do not respect scissor rects.
2020-04-04 10:51:47 -07:00
params . allowClear = framebufferManager_ - > UseBufferedRendering ( ) ;
2016-03-12 13:37:08 -08:00
params . allowSeparateAlphaClear = false ;
2018-04-28 16:32:09 -07:00
params . provokeFlatFirst = true ;
2021-10-23 11:59:34 -07:00
params . flippedY = true ;
2021-10-23 14:20:44 -07:00
params . usesHalfZ = true ;
2016-03-12 13:37:08 -08:00
2020-01-26 15:30:20 +01:00
// We need to update the viewport early because it's checked for flipping in SoftwareTransform.
// We don't have a "DrawStateEarly" in vulkan, so...
// TODO: Probably should eventually refactor this and feed the vp size into SoftwareTransform directly (Unknown's idea).
if ( gstate_c . IsDirty ( DIRTY_VIEWPORTSCISSOR_STATE ) ) {
2021-10-30 17:30:05 -07:00
ViewportAndScissor vpAndScissor ;
ConvertViewportAndScissor ( framebufferManager_ - > UseBufferedRendering ( ) ,
framebufferManager_ - > GetRenderWidth ( ) , framebufferManager_ - > GetRenderHeight ( ) ,
framebufferManager_ - > GetTargetBufferWidth ( ) , framebufferManager_ - > GetTargetBufferHeight ( ) ,
vpAndScissor ) ;
2022-08-20 14:16:55 -07:00
UpdateCachedViewportState ( vpAndScissor ) ;
2020-01-26 15:30:20 +01:00
}
2015-10-10 16:41:19 +02:00
int maxIndex = indexGen . MaxIndex ( ) ;
2020-05-08 00:26:41 -07:00
SoftwareTransform swTransform ( params ) ;
2021-10-23 11:59:34 -07:00
const Lin : : Vec3 trans ( gstate_c . vpXOffset , gstate_c . vpYOffset , gstate_c . vpZOffset * 0.5f + 0.5f ) ;
const Lin : : Vec3 scale ( gstate_c . vpWidthScale , gstate_c . vpHeightScale , gstate_c . vpDepthScale * 0.5f ) ;
swTransform . SetProjMatrix ( gstate . projMatrix , gstate_c . vpWidth < 0 , gstate_c . vpHeight < 0 , trans , scale ) ;
2020-05-08 00:26:41 -07:00
swTransform . Decode ( prim , dec_ - > VertexType ( ) , dec_ - > GetDecVtxFmt ( ) , maxIndex , & result ) ;
2022-11-09 20:34:29 -08:00
// Non-zero depth clears are unusual, but some drivers don't match drawn depth values to cleared values.
// Games sometimes expect exact matches (see #12626, for example) for equal comparisons.
if ( result . action = = SW_CLEAR & & everUsedEqualDepth_ & & gstate . isClearModeDepthMask ( ) & & result . depth > 0.0f & & result . depth < 1.0f )
result . action = SW_NOT_READY ;
2023-05-02 13:09:47 +02:00
int indsOffset = 0 ;
2020-05-08 00:26:41 -07:00
if ( result . action = = SW_NOT_READY ) {
swTransform . DetectOffsetTexture ( maxIndex ) ;
2023-05-02 13:21:47 +02:00
swTransform . BuildDrawingParams ( prim , indexGen . VertexCount ( ) , dec_ - > VertexType ( ) , inds , indsOffset , DECODED_INDEX_BUFFER_SIZE / sizeof ( uint16_t ) , maxIndex , & result ) ;
2020-05-08 00:26:41 -07:00
}
2015-10-10 16:41:19 +02:00
2020-05-23 00:25:39 -07:00
if ( result . setSafeSize )
framebufferManager_ - > SetSafeSize ( result . safeWidth , result . safeHeight ) ;
2016-01-24 17:30:26 +01:00
// Only here, where we know whether to clear or to draw primitives, should we actually set the current framebuffer! Because that gives use the opportunity
// to use a "pre-clear" render pass, for high efficiency on tilers.
2015-10-10 16:41:19 +02:00
if ( result . action = = SW_DRAW_PRIMITIVES ) {
2016-05-01 16:25:09 -07:00
if ( textureNeedsApply ) {
2017-02-19 23:19:55 +01:00
textureCache_ - > ApplyTexture ( ) ;
textureCache_ - > GetVulkanHandles ( imageView , sampler ) ;
2016-05-01 16:25:09 -07:00
if ( imageView = = VK_NULL_HANDLE )
2022-10-28 18:40:55 +02:00
imageView = ( VkImageView ) draw_ - > GetNativeObject ( gstate_c . arrayTexture ? Draw : : NativeObject : : NULL_IMAGEVIEW_ARRAY : Draw : : NativeObject : : NULL_IMAGEVIEW ) ;
2016-05-01 16:25:09 -07:00
if ( sampler = = VK_NULL_HANDLE )
sampler = nullSampler_ ;
}
2022-10-01 19:22:16 -07:00
if ( ! lastPipeline_ | | gstate_c . IsDirty ( DIRTY_BLEND_STATE | DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_RASTER_STATE | DIRTY_DEPTHSTENCIL_STATE | DIRTY_VERTEXSHADER_STATE | DIRTY_FRAGMENTSHADER_STATE | DIRTY_GEOMETRYSHADER_STATE ) | | prim ! = lastPrim_ ) {
2017-08-18 13:39:42 +02:00
if ( prim ! = lastPrim_ | | gstate_c . IsDirty ( DIRTY_BLEND_STATE | DIRTY_VIEWPORTSCISSOR_STATE | DIRTY_RASTER_STATE | DIRTY_DEPTHSTENCIL_STATE ) ) {
ConvertStateToVulkanKey ( * framebufferManager_ , shaderManager_ , prim , pipelineKey_ , dynState_ ) ;
}
2022-10-23 11:21:35 +02:00
VulkanVertexShader * vshader = nullptr ;
VulkanFragmentShader * fshader = nullptr ;
VulkanGeometryShader * gshader = nullptr ;
2022-12-14 22:48:17 +01:00
shaderManager_ - > GetShaders ( prim , dec_ , & vshader , & fshader , & gshader , pipelineState_ , false , false , decOptions_ . expandAllWeightsToFloat , true ) ;
2022-09-02 22:40:15 +02:00
_dbg_assert_msg_ ( ! vshader - > UseHWTransform ( ) , " Bad vshader " ) ;
2023-01-13 10:14:29 +01:00
VulkanPipeline * pipeline = pipelineManager_ - > GetOrCreatePipeline ( renderManager , pipelineLayout_ , pipelineKey_ , & dec_ - > decFmt , vshader , fshader , gshader , false , 0 , framebufferManager_ - > GetMSAALevel ( ) , false ) ;
2018-03-19 17:46:58 +01:00
if ( ! pipeline | | ! pipeline - > pipeline ) {
2017-08-18 13:39:42 +02:00
// Already logged, let's bail out.
2022-01-09 21:34:05 -08:00
decodedVerts_ = 0 ;
numDrawCalls = 0 ;
decodeCounter_ = 0 ;
2022-11-06 08:55:07 -08:00
decOptions_ . applySkinInDecode = g_Config . bSoftwareSkinning ;
2017-08-18 13:39:42 +02:00
return ;
}
2017-11-15 20:43:29 +01:00
BindShaderBlendTex ( ) ; // This might cause copies so super important to do before BindPipeline.
2020-06-02 09:50:20 +02:00
2022-02-19 20:40:27 +01:00
renderManager - > BindPipeline ( pipeline - > pipeline , pipeline - > pipelineFlags , pipelineLayout_ ) ;
2017-08-18 13:39:42 +02:00
if ( pipeline ! = lastPipeline_ ) {
2018-03-16 17:38:02 +01:00
if ( lastPipeline_ & & ! lastPipeline_ - > UsesBlendConstant ( ) & & pipeline - > UsesBlendConstant ( ) ) {
2017-10-20 18:09:05 +02:00
gstate_c . Dirty ( DIRTY_BLEND_STATE ) ;
}
2017-10-22 10:07:35 +02:00
lastPipeline_ = pipeline ;
2017-08-18 13:39:42 +02:00
}
2018-03-16 17:38:02 +01:00
ApplyDrawStateLate ( renderManager , result . setStencil , result . stencilValue , pipeline - > UsesBlendConstant ( ) ) ;
2017-08-18 13:39:42 +02:00
gstate_c . Clean ( DIRTY_BLEND_STATE | DIRTY_DEPTHSTENCIL_STATE | DIRTY_RASTER_STATE | DIRTY_VIEWPORTSCISSOR_STATE ) ;
2022-08-28 08:34:48 -07:00
gstate_c . Dirty ( dirtyRequiresRecheck_ ) ;
dirtyRequiresRecheck_ = 0 ;
2017-08-22 13:25:45 +02:00
lastPipeline_ = pipeline ;
2016-03-12 09:21:13 -08:00
}
2022-08-05 21:11:33 +02:00
2017-08-18 13:39:42 +02:00
lastPrim_ = prim ;
2016-01-09 01:23:32 +01:00
2020-04-04 11:03:07 -07:00
dirtyUniforms_ | = shaderManager_ - > UpdateUniforms ( framebufferManager_ - > UseBufferedRendering ( ) ) ;
2017-08-15 16:01:50 +02:00
2016-03-20 16:33:34 +01:00
// Even if the first draw is through-mode, make sure we at least have one copy of these uniforms buffered
2022-08-05 21:11:33 +02:00
UpdateUBOs ( & frameData ) ;
2016-01-09 01:23:32 +01:00
2018-04-10 12:22:02 +02:00
VkDescriptorSet ds = GetOrCreateDescriptorSet ( imageView , sampler , baseBuf , lightBuf , boneBuf , tess ) ;
const uint32_t dynamicUBOOffsets [ 3 ] = {
baseUBOOffset , lightUBOOffset , boneUBOOffset ,
2016-01-09 01:23:32 +01:00
} ;
2017-08-18 13:39:42 +02:00
2017-08-22 13:25:45 +02:00
PROFILE_THIS_SCOPE ( " renderman_q " ) ;
2016-01-09 01:23:32 +01:00
2020-05-08 00:26:41 -07:00
if ( result . drawIndexed ) {
2016-03-20 09:52:13 +01:00
VkBuffer vbuf , ibuf ;
2023-03-15 10:09:39 +01:00
vbOffset = ( uint32_t ) pushVertex_ - > Push ( result . drawBuffer , maxIndex * sizeof ( TransformedVertex ) , 4 , & vbuf ) ;
2023-05-02 13:09:47 +02:00
ibOffset = ( uint32_t ) pushIndex_ - > Push ( inds + indsOffset , sizeof ( short ) * result . drawNumTrans , 4 , & ibuf ) ;
2022-09-01 14:21:34 +02:00
renderManager - > DrawIndexed ( ds , ARRAY_SIZE ( dynamicUBOOffsets ) , dynamicUBOOffsets , vbuf , vbOffset , ibuf , ibOffset , result . drawNumTrans , 1 , VK_INDEX_TYPE_UINT16 ) ;
2023-05-02 13:21:47 +02:00
} else if ( result . drawNumTrans > 0 ) {
2016-03-20 09:52:13 +01:00
VkBuffer vbuf ;
2023-03-15 10:09:39 +01:00
vbOffset = ( uint32_t ) pushVertex_ - > Push ( result . drawBuffer , result . drawNumTrans * sizeof ( TransformedVertex ) , 4 , & vbuf ) ;
2022-09-01 14:21:34 +02:00
renderManager - > Draw ( ds , ARRAY_SIZE ( dynamicUBOOffsets ) , dynamicUBOOffsets , vbuf , vbOffset , result . drawNumTrans ) ;
2015-10-10 16:41:19 +02:00
}
} else if ( result . action = = SW_CLEAR ) {
2016-03-12 13:37:08 -08:00
// Note: we won't get here if the clear is alpha but not color, or color but not alpha.
2023-02-25 18:31:13 +01:00
bool clearColor = gstate . isClearModeColorMask ( ) ;
bool clearAlpha = gstate . isClearModeAlphaMask ( ) ; // and stencil
bool clearDepth = gstate . isClearModeDepthMask ( ) ;
int mask = 0 ;
// The Clear detection takes care of doing a regular draw instead if separate masking
// of color and alpha is needed, so we can just treat them as the same.
if ( clearColor | | clearAlpha ) mask | = Draw : : FBChannel : : FB_COLOR_BIT ;
if ( clearDepth ) mask | = Draw : : FBChannel : : FB_DEPTH_BIT ;
if ( clearAlpha ) mask | = Draw : : FBChannel : : FB_STENCIL_BIT ;
// Note that since the alpha channel and the stencil channel are shared on the PSP,
// when we clear alpha, we also clear stencil to the same value.
draw_ - > Clear ( mask , result . color , result . depth , result . color > > 24 ) ;
if ( clearColor | | clearAlpha ) {
framebufferManager_ - > SetColorUpdated ( gstate_c . skipDrawReason ) ;
}
2022-10-17 08:27:49 +02:00
if ( gstate_c . Use ( GPU_USE_CLEAR_RAM_HACK ) & & gstate . isClearModeColorMask ( ) & & ( gstate . isClearModeAlphaMask ( ) | | gstate . FrameBufFormat ( ) = = GE_FORMAT_565 ) ) {
2020-05-23 00:25:39 -07:00
int scissorX1 = gstate . getScissorX1 ( ) ;
int scissorY1 = gstate . getScissorY1 ( ) ;
int scissorX2 = gstate . getScissorX2 ( ) + 1 ;
int scissorY2 = gstate . getScissorY2 ( ) + 1 ;
2017-04-09 15:10:07 -07:00
framebufferManager_ - > ApplyClearToMemory ( scissorX1 , scissorY1 , scissorX2 , scissorY2 , result . color ) ;
2016-09-18 19:40:44 -07:00
}
2015-10-10 16:41:19 +02:00
}
2022-11-06 08:55:07 -08:00
decOptions_ . applySkinInDecode = g_Config . bSoftwareSkinning ;
2015-10-10 16:41:19 +02:00
}
2023-01-04 17:10:56 +01:00
gpuStats . numFlushes + + ;
2015-10-10 16:41:19 +02:00
gpuStats . numDrawCalls + = numDrawCalls ;
2017-06-02 11:47:14 +02:00
gpuStats . numVertsSubmitted + = vertexCountInDrawCalls_ ;
2015-10-10 16:41:19 +02:00
indexGen . Reset ( ) ;
2017-06-02 11:47:14 +02:00
decodedVerts_ = 0 ;
2015-10-10 16:41:19 +02:00
numDrawCalls = 0 ;
2017-06-02 11:47:14 +02:00
vertexCountInDrawCalls_ = 0 ;
decodeCounter_ = 0 ;
dcid_ = 0 ;
2015-10-10 16:41:19 +02:00
gstate_c . vertexFullAlpha = true ;
framebufferManager_ - > SetColorUpdated ( gstate_c . skipDrawReason ) ;
// Now seems as good a time as any to reset the min/max coords, which we may examine later.
gstate_c . vertBounds . minU = 512 ;
gstate_c . vertBounds . minV = 512 ;
gstate_c . vertBounds . maxU = 0 ;
gstate_c . vertBounds . maxV = 0 ;
2018-09-01 08:32:03 -07:00
GPUDebug : : NotifyDraw ( ) ;
2015-10-10 16:41:19 +02:00
}
2016-03-20 15:13:17 -07:00
void DrawEngineVulkan : : UpdateUBOs ( FrameData * frame ) {
if ( ( dirtyUniforms_ & DIRTY_BASE_UNIFORMS ) | | baseBuf = = VK_NULL_HANDLE ) {
2023-03-15 10:09:39 +01:00
baseUBOOffset = shaderManager_ - > PushBaseBuffer ( pushUBO_ , & baseBuf ) ;
2016-03-20 15:13:17 -07:00
dirtyUniforms_ & = ~ DIRTY_BASE_UNIFORMS ;
}
if ( ( dirtyUniforms_ & DIRTY_LIGHT_UNIFORMS ) | | lightBuf = = VK_NULL_HANDLE ) {
2023-03-15 10:09:39 +01:00
lightUBOOffset = shaderManager_ - > PushLightBuffer ( pushUBO_ , & lightBuf ) ;
2016-03-20 15:13:17 -07:00
dirtyUniforms_ & = ~ DIRTY_LIGHT_UNIFORMS ;
}
2018-04-10 12:22:02 +02:00
if ( ( dirtyUniforms_ & DIRTY_BONE_UNIFORMS ) | | boneBuf = = VK_NULL_HANDLE ) {
2023-03-15 10:09:39 +01:00
boneUBOOffset = shaderManager_ - > PushBoneBuffer ( pushUBO_ , & boneBuf ) ;
2018-04-10 12:22:02 +02:00
dirtyUniforms_ & = ~ DIRTY_BONE_UNIFORMS ;
}
2016-03-20 15:13:17 -07:00
}
2021-11-14 15:25:28 -08:00
// Returns the per-frame data slot for the frame currently being recorded,
// as reported by the Vulkan context.
DrawEngineVulkan::FrameData &DrawEngineVulkan::GetCurFrame() {
	VulkanContext *vulkan = (VulkanContext *)draw_->GetNativeObject(Draw::NativeObject::CONTEXT);
	int curFrame = vulkan->GetCurFrame();
	return frame_[curFrame];
}
2018-09-29 13:39:02 +09:00
void TessellationDataTransferVulkan : : SendDataToShader ( const SimpleVertex * const * points , int size_u , int size_v , u32 vertType , const Spline : : Weight2D & weights ) {
2018-04-12 12:00:19 +02:00
// SSBOs that are not simply float1 or float2 need to be padded up to a float4 size. vec3 members
// also need to be 16-byte aligned, hence the padding.
2017-11-12 12:07:33 +01:00
struct TessData {
float pos [ 3 ] ; float pad1 ;
float uv [ 2 ] ; float pad2 [ 2 ] ;
float color [ 4 ] ;
} ;
2018-09-22 22:06:40 +09:00
int size = size_u * size_v ;
2019-02-05 10:05:22 +01:00
int ssboAlignment = vulkan_ - > GetPhysicalDeviceProperties ( ) . properties . limits . minStorageBufferOffsetAlignment ;
2023-03-14 23:21:43 +01:00
uint8_t * data = ( uint8_t * ) push_ - > Allocate ( size * sizeof ( TessData ) , ssboAlignment , & bufInfo_ [ 0 ] . buffer , ( uint32_t * ) & bufInfo_ [ 0 ] . offset ) ;
2018-07-11 01:09:20 +09:00
bufInfo_ [ 0 ] . range = size * sizeof ( TessData ) ;
2017-11-12 12:07:33 +01:00
2018-06-28 01:41:16 +09:00
float * pos = ( float * ) ( data ) ;
float * tex = ( float * ) ( data + offsetof ( TessData , uv ) ) ;
float * col = ( float * ) ( data + offsetof ( TessData , color ) ) ;
int stride = sizeof ( TessData ) / sizeof ( float ) ;
2017-03-23 23:28:38 +09:00
2018-06-28 01:41:16 +09:00
CopyControlPoints ( pos , tex , col , stride , stride , stride , points , size , vertType ) ;
2018-07-11 01:09:20 +09:00
2018-09-29 13:39:02 +09:00
using Spline : : Weight ;
2018-07-11 01:09:20 +09:00
// Weights U
2023-03-14 23:21:43 +01:00
data = ( uint8_t * ) push_ - > Allocate ( weights . size_u * sizeof ( Weight ) , ssboAlignment , & bufInfo_ [ 1 ] . buffer , ( uint32_t * ) & bufInfo_ [ 1 ] . offset ) ;
2018-07-11 01:09:20 +09:00
memcpy ( data , weights . u , weights . size_u * sizeof ( Weight ) ) ;
bufInfo_ [ 1 ] . range = weights . size_u * sizeof ( Weight ) ;
// Weights V
2023-03-14 23:21:43 +01:00
data = ( uint8_t * ) push_ - > Allocate ( weights . size_v * sizeof ( Weight ) , ssboAlignment , & bufInfo_ [ 2 ] . buffer , ( uint32_t * ) & bufInfo_ [ 2 ] . offset ) ;
2018-07-11 01:09:20 +09:00
memcpy ( data , weights . v , weights . size_v * sizeof ( Weight ) ) ;
bufInfo_ [ 2 ] . range = weights . size_v * sizeof ( Weight ) ;
2017-11-11 21:50:24 +01:00
}