#include <algorithm>
#include <cstdint>

#include <map>
#include <sstream>

#include "Common/Log.h"
#include "Common/StringUtils.h"
#include "Common/TimeUtil.h"

#include "Common/GPU/Vulkan/VulkanAlloc.h"
#include "Common/GPU/Vulkan/VulkanContext.h"
#include "Common/GPU/Vulkan/VulkanRenderManager.h"

#include "Common/Thread/ThreadUtil.h"
#include "Common/VR/PPSSPPVR.h"

#if 0 // def _DEBUG
#define VLOG(...) NOTICE_LOG(G3D, __VA_ARGS__)
#else
#define VLOG(...)
#endif

#ifndef UINT64_MAX
#define UINT64_MAX 0xFFFFFFFFFFFFFFFFULL
#endif

using namespace PPSSPP_VK;

// compatibleRenderPass is an example render pass from the "compatibility class" that rpType identifies.
bool VKRGraphicsPipeline::Create(VulkanContext *vulkan, VkRenderPass compatibleRenderPass, RenderPassType rpType, VkSampleCountFlagBits sampleCount, double scheduleTime, int countToCompile) {
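	// All variants of this pipeline share one sample count: the first multisampled variant to be
	// created latches sampleCount_, and any later multisampled variant must request the same count.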
	bool multisample = RenderPassTypeHasMultisample(rpType);
	if (multisample) {
		if (sampleCount_ != VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM) {
			_assert_(sampleCount == sampleCount_);
		} else {
			sampleCount_ = sampleCount;
		}
	}

	// Sanity check.
	// Seen in crash reports from PowerVR GE8320, presumably we failed creating some shader modules.
	if (!desc->vertexShader || !desc->fragmentShader) {
		ERROR_LOG(G3D, "Failed creating graphics pipeline - missing vs/fs shader module pointers!");
		pipeline[(size_t)rpType]->Post(VK_NULL_HANDLE);
		return false;
	}

	// Fill in the last part of the desc since now it's time to block.
	VkShaderModule vs = desc->vertexShader->BlockUntilReady();
	VkShaderModule fs = desc->fragmentShader->BlockUntilReady();
	VkShaderModule gs = desc->geometryShader ? desc->geometryShader->BlockUntilReady() : VK_NULL_HANDLE;

	if (!vs || !fs || (!gs && desc->geometryShader)) {
		ERROR_LOG(G3D, "Failed creating graphics pipeline - missing shader modules");
		pipeline[(size_t)rpType]->Post(VK_NULL_HANDLE);
		return false;
	}

	if (!compatibleRenderPass) {
		ERROR_LOG(G3D, "Failed creating graphics pipeline - compatible render pass was nullptr");
		pipeline[(size_t)rpType]->Post(VK_NULL_HANDLE);
		return false;
	}

	uint32_t stageCount = 2;
	VkPipelineShaderStageCreateInfo ss[3]{};
	ss[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	ss[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
	ss[0].pSpecializationInfo = nullptr;
	ss[0].module = vs;
	ss[0].pName = "main";
	ss[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	ss[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
	ss[1].pSpecializationInfo = nullptr;
	ss[1].module = fs;
	ss[1].pName = "main";
	if (gs) {
		stageCount++;
		ss[2].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		ss[2].stage = VK_SHADER_STAGE_GEOMETRY_BIT;
		ss[2].pSpecializationInfo = nullptr;
		ss[2].module = gs;
		ss[2].pName = "main";
	}

	VkGraphicsPipelineCreateInfo pipe{ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
	pipe.pStages = ss;
	pipe.stageCount = stageCount;
	pipe.renderPass = compatibleRenderPass;
	pipe.basePipelineIndex = 0;
	pipe.pColorBlendState = &desc->cbs;
	pipe.pDepthStencilState = &desc->dss;
	pipe.pRasterizationState = &desc->rs;

	VkPipelineMultisampleStateCreateInfo ms{ VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
	ms.rasterizationSamples = multisample ? sampleCount : VK_SAMPLE_COUNT_1_BIT;
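	// When the shader can discard fragments under MSAA, request full per-sample shading below
	// (minSampleShading = 1.0) so the discard decision is evaluated per sample rather than per pixel.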
	if (multisample && (flags_ & PipelineFlags::USES_DISCARD)) {
		// Extreme quality
		ms.sampleShadingEnable = true;
		ms.minSampleShading = 1.0f;
	}

	VkPipelineInputAssemblyStateCreateInfo inputAssembly{ VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
	inputAssembly.topology = desc->topology;

	// We will use dynamic viewport state.
	pipe.pVertexInputState = &desc->vis;
	pipe.pViewportState = &desc->views;
	pipe.pTessellationState = nullptr;
	pipe.pDynamicState = &desc->ds;
	pipe.pInputAssemblyState = &inputAssembly;
	pipe.pMultisampleState = &ms;
	pipe.layout = desc->pipelineLayout;
	pipe.basePipelineHandle = VK_NULL_HANDLE;
	pipe.basePipelineIndex = 0;
	pipe.subpass = 0;

	double start = time_now_d();
	VkPipeline vkpipeline;
	VkResult result = vkCreateGraphicsPipelines(vulkan->GetDevice(), desc->pipelineCache, 1, &pipe, nullptr, &vkpipeline);

	double now = time_now_d();
	double taken_ms_since_scheduling = (now - scheduleTime) * 1000.0;
	double taken_ms = (now - start) * 1000.0;

	if (taken_ms < 0.1) {
		DEBUG_LOG(G3D, "Pipeline (x/%d) time on %s: %0.2f ms, %0.2f ms since scheduling (fast) rpType: %04x sampleBits: %d (%s)",
			countToCompile, GetCurrentThreadName(), taken_ms, taken_ms_since_scheduling, (u32)rpType, (u32)sampleCount, tag_.c_str());
	} else {
		INFO_LOG(G3D, "Pipeline (x/%d) time on %s: %0.2f ms, %0.2f ms since scheduling rpType: %04x sampleBits: %d (%s)",
			countToCompile, GetCurrentThreadName(), taken_ms, taken_ms_since_scheduling, (u32)rpType, (u32)sampleCount, tag_.c_str());
	}

	bool success = true;
	if (result == VK_INCOMPLETE) {
		// Bad (disallowed by spec) return value seen on Adreno in Burnout :( Try to ignore?
		// Would really like to log more here, we could probably attach more info to desc.
		//
		// At least create a null placeholder to avoid creating over and over if something is broken.
		pipeline[(size_t)rpType]->Post(VK_NULL_HANDLE);
		ERROR_LOG(G3D, "Failed creating graphics pipeline! VK_INCOMPLETE");
		LogCreationFailure();
		success = false;
	} else if (result != VK_SUCCESS) {
		pipeline[(size_t)rpType]->Post(VK_NULL_HANDLE);
		ERROR_LOG(G3D, "Failed creating graphics pipeline! result='%s'", VulkanResultToString(result));
		LogCreationFailure();
		success = false;
	} else {
		// Success!
		if (!tag_.empty()) {
			vulkan->SetDebugName(vkpipeline, VK_OBJECT_TYPE_PIPELINE, tag_.c_str());
		}
		pipeline[(size_t)rpType]->Post(vkpipeline);
	}

	return success;
}

void VKRGraphicsPipeline::DestroyVariants(VulkanContext *vulkan, bool msaaOnly) {
	for (size_t i = 0; i < (size_t)RenderPassType::TYPE_COUNT; i++) {
		if (!this->pipeline[i])
			continue;
		if (msaaOnly && (i & (int)RenderPassType::MULTISAMPLE) == 0)
			continue;

		VkPipeline pipeline = this->pipeline[i]->BlockUntilReady();
		// pipeline can be nullptr here, if it failed to compile before.
		if (pipeline) {
			vulkan->Delete().QueueDeletePipeline(pipeline);
		}
		this->pipeline[i] = nullptr;
	}
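	// Reset the latched sample count so the next multisampled variant created for this pipeline
	// is free to pick a new one.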
	sampleCount_ = VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM;
}

void VKRGraphicsPipeline::DestroyVariantsInstant(VkDevice device) {
	for (size_t i = 0; i < (size_t)RenderPassType::TYPE_COUNT; i++) {
		if (pipeline[i]) {
			vkDestroyPipeline(device, pipeline[i]->BlockUntilReady(), nullptr);
			delete pipeline[i];
			pipeline[i] = nullptr;
		}
	}
}

VKRGraphicsPipeline::~VKRGraphicsPipeline() {
	// This is called from the callback queued in QueueForDeletion.
	// Here we are free to delete things directly, no need to queue the deletions.
	for (size_t i = 0; i < (size_t)RenderPassType::TYPE_COUNT; i++) {
		_assert_(!pipeline[i]);
	}
	if (desc)
		desc->Release();
}

void VKRGraphicsPipeline::QueueForDeletion(VulkanContext *vulkan) {
	// Can't destroy variants here, the pipeline still lives for a while.
	vulkan->Delete().QueueCallback([](VulkanContext *vulkan, void *p) {
		VKRGraphicsPipeline *pipeline = (VKRGraphicsPipeline *)p;
		pipeline->DestroyVariantsInstant(vulkan->GetDevice());
		delete pipeline;
	}, this);
}

u32 VKRGraphicsPipeline::GetVariantsBitmask() const {
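	// Bit i is set when the variant for RenderPassType i exists. A rough sketch of the intended
	// round trip (mirrors how CreateGraphicsPipeline consumes variantBitmask when loading a cache):
	//   u32 variants = pipeline->GetVariantsBitmask();  // persist alongside the shader cache
	//   ...
	//   CreateGraphicsPipeline(desc, flags, variants, sampleCount, true, tag);  // re-queue on load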
	u32 bitmask = 0;
	for (size_t i = 0; i < (size_t)RenderPassType::TYPE_COUNT; i++) {
		if (pipeline[i]) {
			bitmask |= 1 << i;
		}
	}
	return bitmask;
}

void VKRGraphicsPipeline::LogCreationFailure() const {
	ERROR_LOG(G3D, "vs: %s\n[END VS]", desc->vertexShaderSource.c_str());
	ERROR_LOG(G3D, "fs: %s\n[END FS]", desc->fragmentShaderSource.c_str());
	if (desc->geometryShader) {
		ERROR_LOG(G3D, "gs: %s\n[END GS]", desc->geometryShaderSource.c_str());
	}
	// TODO: Maybe log various other state?
	ERROR_LOG(G3D, "======== END OF PIPELINE ==========");
}

bool VKRComputePipeline::CreateAsync(VulkanContext *vulkan) {
	if (!desc) {
		// Already failed to create this one.
		return false;
	}
	pipeline->SpawnEmpty(&g_threadManager, [=] {
		VkPipeline vkpipeline;
		VkResult result = vkCreateComputePipelines(vulkan->GetDevice(), desc->pipelineCache, 1, &desc->pipe, nullptr, &vkpipeline);
		if (result != VK_SUCCESS) {
			ERROR_LOG(G3D, "Failed creating compute pipeline! result='%s'", VulkanResultToString(result));
			vkpipeline = VK_NULL_HANDLE;
		}
		// The task owns the desc - free it once creation has succeeded or failed.
		delete desc;
		return vkpipeline;
	}, TaskType::CPU_COMPUTE);
	desc = nullptr;
	return true;
}

VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan)
	: vulkan_(vulkan), queueRunner_(vulkan),
	initTimeMs_("initTimeMs"),
	totalGPUTimeMs_("totalGPUTimeMs"),
	renderCPUTimeMs_("renderCPUTimeMs")
{
	inflightFramesAtStart_ = vulkan_->GetInflightFrames();
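	// Capture the inflight-frame count once at startup: the per-frame data below is initialized with
	// this count, and the destructor tears down exactly the same number of FrameData entries.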

	frameDataShared_.Init(vulkan);

	for (int i = 0; i < inflightFramesAtStart_; i++) {
		frameData_[i].Init(vulkan, i);
	}

	queueRunner_.CreateDeviceObjects();
}

bool VulkanRenderManager::CreateBackbuffers() {
	if (!vulkan_->GetSwapchain()) {
		ERROR_LOG(G3D, "No swapchain - can't create backbuffers");
		return false;
	}

	VkCommandBuffer cmdInit = GetInitCmd();

	if (!queueRunner_.CreateSwapchain(cmdInit)) {
		return false;
	}

	curWidthRaw_ = -1;
	curHeightRaw_ = -1;

	if (HasBackbuffers()) {
		VLOG("Backbuffers Created");
	}

	if (newInflightFrames_ != -1) {
		INFO_LOG(G3D, "Updating inflight frames to %d", newInflightFrames_);
		vulkan_->UpdateInflightFrames(newInflightFrames_);
		newInflightFrames_ = -1;
	}

	outOfDateFrames_ = 0;

	// Start the thread.
	if (HasBackbuffers()) {
		run_ = true; // For controlling the compiler thread's exit

		INFO_LOG(G3D, "Starting Vulkan submission thread");
		thread_ = std::thread(&VulkanRenderManager::ThreadFunc, this);
		INFO_LOG(G3D, "Starting Vulkan compiler thread");
		compileThread_ = std::thread(&VulkanRenderManager::CompileThreadFunc, this);
	}
	return true;
}

// Called from main thread.
void VulkanRenderManager::StopThread() {
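	// Shutdown order: queue an EXIT task for the render thread, set run_ = false so the compiler
	// thread can leave its wait loop, join both threads, then end any command buffers that were
	// left open for the current frame.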
	{
		// Tell the render thread to quit when it's done.
		VKRRenderThreadTask task;
		task.frame = vulkan_->GetCurFrame();
		task.runType = VKRRunType::EXIT;
		std::unique_lock<std::mutex> lock(pushMutex_);
		renderThreadQueue_.push(task);
		pushCondVar_.notify_one();
	}

	// Compiler thread still relies on this.
	run_ = false;

	// Stop the thread.
	thread_.join();

	for (int i = 0; i < vulkan_->GetInflightFrames(); i++) {
		auto &frameData = frameData_[i];
		// Zero the queries so we don't try to pull them later.
		frameData.profile.timestampDescriptions.clear();
	}

	INFO_LOG(G3D, "Vulkan submission thread joined. Frame=%d", vulkan_->GetCurFrame());

	if (compileThread_.joinable()) {
		// Lock to avoid race conditions.
		std::lock_guard<std::mutex> guard(compileMutex_);
		compileCond_.notify_all();
	}
	compileThread_.join();
	INFO_LOG(G3D, "Vulkan compiler thread joined.");

	// Eat whatever has been queued up for this frame if anything.
	Wipe();

	// Clean out any remaining queued data, which might refer to things that might not be valid
	// when we restart the thread...

	// Not sure if this is still needed
	for (int i = 0; i < vulkan_->GetInflightFrames(); i++) {
		auto &frameData = frameData_[i];
		if (frameData.hasInitCommands) {
			// Clear 'em out. This can happen on restart sometimes.
			vkEndCommandBuffer(frameData.initCmd);
			frameData.hasInitCommands = false;
		}
		if (frameData.hasMainCommands) {
			vkEndCommandBuffer(frameData.mainCmd);
			frameData.hasMainCommands = false;
		}
		if (frameData.hasPresentCommands) {
			vkEndCommandBuffer(frameData.presentCmd);
			frameData.hasPresentCommands = false;
		}
	}
}

void VulkanRenderManager::DestroyBackbuffers() {
	StopThread();
	vulkan_->WaitUntilQueueIdle();

	queueRunner_.DestroyBackBuffers();
}

VulkanRenderManager::~VulkanRenderManager() {
	INFO_LOG(G3D, "VulkanRenderManager destructor");

	_dbg_assert_(!run_); // StopThread should already have been called from DestroyBackbuffers.

	vulkan_->WaitUntilQueueIdle();

	VkDevice device = vulkan_->GetDevice();
	frameDataShared_.Destroy(vulkan_);
	for (int i = 0; i < inflightFramesAtStart_; i++) {
		frameData_[i].Destroy(vulkan_);
	}
	queueRunner_.DestroyDeviceObjects();
}

struct SinglePipelineTask {
	VKRGraphicsPipeline *pipeline;
	VkRenderPass compatibleRenderPass;
	RenderPassType rpType;
	VkSampleCountFlagBits sampleCount;
	double scheduleTime;
	int countToCompile;
};

class CreateMultiPipelinesTask : public Task {
public:
	CreateMultiPipelinesTask(VulkanContext *vulkan, std::vector<SinglePipelineTask> tasks) : vulkan_(vulkan), tasks_(tasks) {}
	~CreateMultiPipelinesTask() {}

	TaskType Type() const override {
		return TaskType::CPU_COMPUTE;
	}

	void Run() override {
		for (auto &task : tasks_) {
			task.pipeline->Create(vulkan_, task.compatibleRenderPass, task.rpType, task.sampleCount, task.scheduleTime, task.countToCompile);
		}
	}

	VulkanContext *vulkan_;
	std::vector<SinglePipelineTask> tasks_;
};

void VulkanRenderManager::CompileThreadFunc() {
	SetCurrentThreadName("ShaderCompile");
	while (true) {
		std::vector<CompileQueueEntry> toCompile;
		{
			std::unique_lock<std::mutex> lock(compileMutex_);
			// TODO: Should this be while?
			// It may be beneficial also to unlock and wait a little bit to see if we get some more shaders
			// so we can do a better job of thread-sorting them.
			if (compileQueue_.empty() && run_) {
				compileCond_.wait(lock);
			}
			toCompile = std::move(compileQueue_);
			compileQueue_.clear();
		}
		if (!run_) {
			break;
		}

		int countToCompile = (int)toCompile.size();

		std::map<std::pair<Promise<VkShaderModule> *, Promise<VkShaderModule> *>, std::vector<SinglePipelineTask>> map;

		double scheduleTime = time_now_d();

		// Here we sort pending graphics pipelines by vertex and fragment shaders, and split up further.
		// Those with the same pairs of shaders should be on the same thread, at least on NVIDIA.
		// I don't think PowerVR cares though, it doesn't seem to reuse information between the compiles,
		// so we might want a different splitting algorithm there.
		for (auto &entry : toCompile) {
			switch (entry.type) {
			case CompileQueueEntry::Type::GRAPHICS:
				map[std::pair<Promise<VkShaderModule> *, Promise<VkShaderModule> *>(entry.graphics->desc->vertexShader, entry.graphics->desc->fragmentShader)].push_back(
					SinglePipelineTask{
						entry.graphics,
						entry.compatibleRenderPass,
						entry.renderPassType,
						entry.sampleCount,
						scheduleTime, // these two are for logging purposes.
						countToCompile,
					}
				);
				break;
			case CompileQueueEntry::Type::COMPUTE:
				// Queue up pending compute pipelines on separate tasks.
				entry.compute->CreateAsync(vulkan_);
				break;
			}
		}
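
		// One CreateMultiPipelinesTask per unique VS/FS pair: every pipeline that shares the pair
		// is compiled on the same worker thread, in line with the sorting rationale above.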
		for (auto iter : map) {
			auto &shaders = iter.first;
			auto &entries = iter.second;

			// NOTICE_LOG(G3D, "For this shader pair, we have %d pipelines to create", (int)entries.size());

			Task *task = new CreateMultiPipelinesTask(vulkan_, entries);
			g_threadManager.EnqueueTask(task);
		}

		queueRunner_.NotifyCompileDone();
	}
}

void VulkanRenderManager::DrainCompileQueue() {
	std::unique_lock<std::mutex> lock(compileMutex_);
	compileCond_.notify_all();
	while (!compileQueue_.empty()) {
		queueRunner_.WaitForCompileNotification();
	}
}

void VulkanRenderManager::ThreadFunc() {
	SetCurrentThreadName("RenderMan");
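	// The render thread: consume VKRRenderThreadTask items pushed from the main/emulation thread,
	// executing each one with Run(), until an EXIT task arrives.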
	while (true) {
		// Pop a task off the queue and execute it.
		VKRRenderThreadTask task;
		{
			std::unique_lock<std::mutex> lock(pushMutex_);
			while (renderThreadQueue_.empty()) {
				pushCondVar_.wait(lock);
			}
			task = renderThreadQueue_.front();
			renderThreadQueue_.pop();
		}

		// Oh, we got a task! We can now have pushMutex_ unlocked, allowing the host to
		// push more work when it feels like it, and just start working.
		if (task.runType == VKRRunType::EXIT) {
			// Oh, host wanted out. Let's leave.
			break;
		}

		Run(task);
	}

	// Wait for the device to be done with everything, before tearing stuff down.
	// TODO: Do we need this?
	vkDeviceWaitIdle(vulkan_->GetDevice());

	VLOG("PULL: Quitting");
}

void VulkanRenderManager::BeginFrame(bool enableProfiling, bool enableLogProfiler) {
	VLOG("BeginFrame");
	VkDevice device = vulkan_->GetDevice();

	int curFrame = vulkan_->GetCurFrame();
	FrameData &frameData = frameData_[curFrame];

	VLOG("PUSH: Fencing %d", curFrame);

	// Makes sure the submission from the previous time around has happened. Otherwise
	// we are not allowed to wait on the fence from another thread here.
	{
		std::unique_lock<std::mutex> lock(frameData.fenceMutex);
		while (!frameData.readyForFence) {
			frameData.fenceCondVar.wait(lock);
		}
		frameData.readyForFence = false;
	}

	// This must be the very first Vulkan call we do in a new frame.
	// Makes sure the very last command buffer from the frame before the previous has been fully executed.
	if (vkWaitForFences(device, 1, &frameData.fence, true, UINT64_MAX) == VK_ERROR_DEVICE_LOST) {
		_assert_msg_(false, "Device lost in vkWaitForFences");
	}
	vkResetFences(device, 1, &frameData.fence);

	int validBits = vulkan_->GetQueueFamilyProperties(vulkan_->GetGraphicsQueueFamilyIndex()).timestampValidBits;
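	// timestampValidBits == 0 means the graphics queue doesn't support timestamp queries at all,
	// in which case GPU profiling is kept off below.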

	// Can't set this until after the fence.
	frameData.profilingEnabled_ = enableProfiling && validBits > 0;

	uint64_t queryResults[MAX_TIMESTAMP_QUERIES];

	if (frameData.profilingEnabled_) {
		// Pull the profiling results from last time and produce a summary!
		if (!frameData.profile.timestampDescriptions.empty()) {
			int numQueries = (int)frameData.profile.timestampDescriptions.size();
			VkResult res = vkGetQueryPoolResults(
				vulkan_->GetDevice(),
				frameData.profile.queryPool, 0, numQueries, sizeof(uint64_t) * numQueries, &queryResults[0], sizeof(uint64_t),
				VK_QUERY_RESULT_64_BIT);
			if (res == VK_SUCCESS) {
				double timestampConversionFactor = (double)vulkan_->GetPhysicalDeviceProperties().properties.limits.timestampPeriod * (1.0 / 1000000.0);
				uint64_t timestampDiffMask = validBits == 64 ? 0xFFFFFFFFFFFFFFFFULL : ((1ULL << validBits) - 1);
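				// Masking each difference to the valid bit width keeps the math correct when the
				// timestamp counter wraps (drivers only guarantee validBits of the 64-bit value).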
				std::stringstream str;

				char line[256];
				totalGPUTimeMs_.Update(((double)((queryResults[numQueries - 1] - queryResults[0]) & timestampDiffMask) * timestampConversionFactor));
				totalGPUTimeMs_.Format(line, sizeof(line));
				str << line;
				renderCPUTimeMs_.Update((frameData.profile.cpuEndTime - frameData.profile.cpuStartTime) * 1000.0);
				renderCPUTimeMs_.Format(line, sizeof(line));
				str << line;
				for (int i = 0; i < numQueries - 1; i++) {
					uint64_t diff = (queryResults[i + 1] - queryResults[i]) & timestampDiffMask;
					double milliseconds = (double)diff * timestampConversionFactor;

					// Can't use SimpleStat for these very easily since these are dynamic per frame.
					// Only the first one is static, the initCmd.
					// Could try some hashtable tracking for the rest, later.
					if (i == 0) {
						initTimeMs_.Update(milliseconds);
						initTimeMs_.Format(line, sizeof(line));
					} else {
						snprintf(line, sizeof(line), "%s: %0.3f ms\n", frameData.profile.timestampDescriptions[i + 1].c_str(), milliseconds);
					}
					str << line;
				}
				frameData.profile.profileSummary = str.str();
			} else {
				frameData.profile.profileSummary = "(error getting GPU profile - not ready?)";
			}
		} else {
			frameData.profile.profileSummary = "(no GPU profile data collected)";
		}
	}

	// Must be after the fence - this performs deletes.
	VLOG("PUSH: BeginFrame %d", curFrame);

	insideFrame_ = true;
	vulkan_->BeginFrame(enableLogProfiler ? GetInitCmd() : VK_NULL_HANDLE);

	frameData.profile.timestampDescriptions.clear();
	if (frameData.profilingEnabled_) {
		// For various reasons, we need to always use an init cmd buffer in this case to perform the vkCmdResetQueryPool,
		// unless we want to limit ourselves to only measure the main cmd buffer.
		// Later versions of Vulkan have support for clearing queries on the CPU timeline, but we don't want to rely on that.
		// Reserve the first two queries for initCmd.
		frameData.profile.timestampDescriptions.push_back("initCmd Begin");
		frameData.profile.timestampDescriptions.push_back("initCmd");
		VkCommandBuffer initCmd = GetInitCmd();
	}
}

VkCommandBuffer VulkanRenderManager::GetInitCmd() {
	int curFrame = vulkan_->GetCurFrame();
	return frameData_[curFrame].GetInitCmd(vulkan_);
}

VKRGraphicsPipeline *VulkanRenderManager::CreateGraphicsPipeline(VKRGraphicsPipelineDesc *desc, PipelineFlags pipelineFlags, uint32_t variantBitmask, VkSampleCountFlagBits sampleCount, bool cacheLoad, const char *tag) {
	if (!desc->vertexShader || !desc->fragmentShader) {
		// Check this before allocating the pipeline object so the early return can't leak it.
		ERROR_LOG(G3D, "Can't create graphics pipeline with missing vs/ps: %p %p", desc->vertexShader, desc->fragmentShader);
		return nullptr;
	}

	VKRGraphicsPipeline *pipeline = new VKRGraphicsPipeline(pipelineFlags, tag);
	pipeline->desc = desc;
	pipeline->desc->AddRef();
	if (curRenderStep_ && !cacheLoad) {
		// The common case during gameplay.
		pipelinesToCheck_.push_back(pipeline);
	} else {
		if (!variantBitmask) {
			WARN_LOG(G3D, "WARNING: Will not compile any variants of pipeline, not in renderpass and empty variantBitmask");
		}
		// Presumably we're in initialization, loading the shader cache.
		// Look at variantBitmask to see what variants we should queue up.
		RPKey key{
			VKRRenderPassLoadAction::CLEAR, VKRRenderPassLoadAction::CLEAR, VKRRenderPassLoadAction::CLEAR,
			VKRRenderPassStoreAction::STORE, VKRRenderPassStoreAction::DONT_CARE, VKRRenderPassStoreAction::DONT_CARE,
		};
		VKRRenderPass *compatibleRenderPass = queueRunner_.GetRenderPass(key);
		compileMutex_.lock();
		bool needsCompile = false;
		for (size_t i = 0; i < (size_t)RenderPassType::TYPE_COUNT; i++) {
			if (!(variantBitmask & (1 << i)))
				continue;
			RenderPassType rpType = (RenderPassType)i;

			// Sanity check - don't compile incompatible types (could be caused by corrupt caches, changes in data structures, etc).
			if ((pipelineFlags & PipelineFlags::USES_DEPTH_STENCIL) && !RenderPassTypeHasDepth(rpType)) {
				WARN_LOG(G3D, "Not compiling pipeline that requires depth, for non depth renderpass type");
				continue;
			}
			if ((pipelineFlags & PipelineFlags::USES_INPUT_ATTACHMENT) && !RenderPassTypeHasInput(rpType)) {
				WARN_LOG(G3D, "Not compiling pipeline that requires input attachment, for non input renderpass type");
				continue;
			}
			// Shouldn't hit this, these should have been filtered elsewhere. However, still a good check to do.
			if (sampleCount == VK_SAMPLE_COUNT_1_BIT && RenderPassTypeHasMultisample(rpType)) {
				WARN_LOG(G3D, "Not compiling single sample pipeline for a multisampled render pass type");
				continue;
			}

			pipeline->pipeline[i] = Promise<VkPipeline>::CreateEmpty();
			compileQueue_.push_back(CompileQueueEntry(pipeline, compatibleRenderPass->Get(vulkan_, rpType, sampleCount), rpType, sampleCount));
			needsCompile = true;
		}
		if (needsCompile)
			compileCond_.notify_one();
		compileMutex_.unlock();
	}
	return pipeline;
}

VKRComputePipeline *VulkanRenderManager::CreateComputePipeline(VKRComputePipelineDesc *desc) {
	VKRComputePipeline *pipeline = new VKRComputePipeline();
	pipeline->desc = desc;
	compileMutex_.lock();
	compileQueue_.push_back(CompileQueueEntry(pipeline));
	compileCond_.notify_one();
	compileMutex_.unlock();
	return pipeline;
}

void VulkanRenderManager::EndCurRenderStep() {
	if (!curRenderStep_)
		return;

	RPKey key{
		curRenderStep_->render.colorLoad, curRenderStep_->render.depthLoad, curRenderStep_->render.stencilLoad,
		curRenderStep_->render.colorStore, curRenderStep_->render.depthStore, curRenderStep_->render.stencilStore,
	};
	// Save the accumulated pipeline flags so we can use that to configure the render pass.
	// We'll often be able to avoid loading/saving the depth/stencil buffer.
	curRenderStep_->render.pipelineFlags = curPipelineFlags_;
	bool depthStencil = (curPipelineFlags_ & PipelineFlags::USES_DEPTH_STENCIL) != 0;
	RenderPassType rpType = depthStencil ? RenderPassType::HAS_DEPTH : RenderPassType::DEFAULT;

	if (curRenderStep_->render.framebuffer && (rpType & RenderPassType::HAS_DEPTH) && !curRenderStep_->render.framebuffer->HasDepth()) {
		WARN_LOG(G3D, "Trying to render with a depth-writing pipeline to a framebuffer without depth: %s", curRenderStep_->render.framebuffer->Tag());
		rpType = RenderPassType::DEFAULT;
	}

	if (!curRenderStep_->render.framebuffer) {
		rpType = RenderPassType::BACKBUFFER;
	} else {
		if (curPipelineFlags_ & PipelineFlags::USES_INPUT_ATTACHMENT) {
			// Not allowed on backbuffers.
			rpType = depthStencil ? (RenderPassType::HAS_DEPTH | RenderPassType::COLOR_INPUT) : RenderPassType::COLOR_INPUT;
		}
		// Framebuffers can be stereo, and if so, will control the render pass type to match.
		// Pipelines can be mono and render fine to stereo etc, so not checking them here.
		// Note that we don't support rendering to just one layer of a multilayer framebuffer!
		if (curRenderStep_->render.framebuffer->numLayers > 1) {
			rpType = (RenderPassType)(rpType | RenderPassType::MULTIVIEW);
		}

		if (curRenderStep_->render.framebuffer->sampleCount != VK_SAMPLE_COUNT_1_BIT) {
			rpType = (RenderPassType)(rpType | RenderPassType::MULTISAMPLE);
		}
	}

	VKRRenderPass *renderPass = queueRunner_.GetRenderPass(key);
	curRenderStep_->render.renderPassType = rpType;

	VkSampleCountFlagBits sampleCount = curRenderStep_->render.framebuffer ? curRenderStep_->render.framebuffer->sampleCount : VK_SAMPLE_COUNT_1_BIT;

	compileMutex_.lock();
	bool needsCompile = false;
	for (VKRGraphicsPipeline *pipeline : pipelinesToCheck_) {
		if (!pipeline) {
			// Not good, but let's try not to crash.
			continue;
		}
		if (!pipeline->pipeline[(size_t)rpType]) {
			pipeline->pipeline[(size_t)rpType] = Promise<VkPipeline>::CreateEmpty();
			_assert_(renderPass);
			compileQueue_.push_back(CompileQueueEntry(pipeline, renderPass->Get(vulkan_, rpType, sampleCount), rpType, sampleCount));
			needsCompile = true;
		}
	}
	if (needsCompile)
		compileCond_.notify_one();
	compileMutex_.unlock();
	pipelinesToCheck_.clear();

	// We don't do this optimization for very small targets, probably not worth it.
	if (!curRenderArea_.Empty() && (curWidth_ > 32 && curHeight_ > 32)) {
		curRenderStep_->render.renderArea = curRenderArea_.ToVkRect2D();
	} else {
		curRenderStep_->render.renderArea.offset = {};
		curRenderStep_->render.renderArea.extent = { (uint32_t)curWidth_, (uint32_t)curHeight_ };
	}
	curRenderArea_.Reset();

	// We no longer have a current render step.
	curRenderStep_ = nullptr;
	curPipelineFlags_ = (PipelineFlags)0;
}

void VulkanRenderManager::BindCurrentFramebufferAsInputAttachment0(VkImageAspectFlags aspectBits) {
	_dbg_assert_(curRenderStep_);
	curRenderStep_->commands.push_back(VkRenderData{ VKRRenderCommand::SELF_DEPENDENCY_BARRIER });
}

void VulkanRenderManager::BindFramebufferAsRenderTarget(VKRFramebuffer *fb, VKRRenderPassLoadAction color, VKRRenderPassLoadAction depth, VKRRenderPassLoadAction stencil, uint32_t clearColor, float clearDepth, uint8_t clearStencil, const char *tag) {
	_dbg_assert_(insideFrame_);
	// Eliminate dupes (bind of the framebuffer we already are rendering to), instantly convert to a clear if possible.
	if (!steps_.empty() && steps_.back()->stepType == VKRStepType::RENDER && steps_.back()->render.framebuffer == fb) {
		u32 clearMask = 0;
		if (color == VKRRenderPassLoadAction::CLEAR) {
			clearMask |= VK_IMAGE_ASPECT_COLOR_BIT;
		}
		if (depth == VKRRenderPassLoadAction::CLEAR) {
			clearMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
			curPipelineFlags_ |= PipelineFlags::USES_DEPTH_STENCIL;
		}
		if (stencil == VKRRenderPassLoadAction::CLEAR) {
			clearMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			curPipelineFlags_ |= PipelineFlags::USES_DEPTH_STENCIL;
		}

		// If we need a clear and the previous step has commands already, it's best to just add a clear and keep going.
		// If there's no clear needed, let's also do that.
		//
		// However, if we do need a clear and there are no commands in the previous pass,
		// we want the queuerunner to have the opportunity to merge, so we'll go ahead and make a new renderpass.
		if (clearMask == 0 || !steps_.back()->commands.empty()) {
			curRenderStep_ = steps_.back();
			curStepHasViewport_ = false;
			curStepHasScissor_ = false;
			for (const auto &c : steps_.back()->commands) {
				if (c.cmd == VKRRenderCommand::VIEWPORT) {
					curStepHasViewport_ = true;
				} else if (c.cmd == VKRRenderCommand::SCISSOR) {
					curStepHasScissor_ = true;
				}
			}
			if (clearMask != 0) {
				VkRenderData data{ VKRRenderCommand::CLEAR };
				data.clear.clearColor = clearColor;
				data.clear.clearZ = clearDepth;
				data.clear.clearStencil = clearStencil;
				data.clear.clearMask = clearMask;
				curRenderStep_->commands.push_back(data);
				curRenderArea_.SetRect(0, 0, curWidth_, curHeight_);
			}
			return;
		}
	}

	// More redundant bind elimination.
	if (curRenderStep_) {
		if (curRenderStep_->commands.empty()) {
			if (curRenderStep_->render.colorLoad != VKRRenderPassLoadAction::CLEAR && curRenderStep_->render.depthLoad != VKRRenderPassLoadAction::CLEAR && curRenderStep_->render.stencilLoad != VKRRenderPassLoadAction::CLEAR) {
				// Can trivially kill the last empty render step.
				_dbg_assert_(steps_.back() == curRenderStep_);
				delete steps_.back();
				steps_.pop_back();
				curRenderStep_ = nullptr;
			}
			VLOG("Empty render step. Usually happens after uploading pixels..");
		}

		EndCurRenderStep();
	}

	// Older Mali drivers have issues when the depth and stencil load/clear actions don't match.
	// TODO: Determine which versions and do this only where necessary.
	u32 lateClearMask = 0;
	if (depth != stencil && vulkan_->GetPhysicalDeviceProperties().properties.vendorID == VULKAN_VENDOR_ARM) {
		if (stencil == VKRRenderPassLoadAction::DONT_CARE) {
			stencil = depth;
		} else if (depth == VKRRenderPassLoadAction::DONT_CARE) {
			depth = stencil;
		} else if (stencil == VKRRenderPassLoadAction::CLEAR) {
			depth = stencil;
			lateClearMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
		} else if (depth == VKRRenderPassLoadAction::CLEAR) {
			stencil = depth;
			lateClearMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
		}
	}

	VKRStep *step = new VKRStep{ VKRStepType::RENDER };
	step->render.framebuffer = fb;
	step->render.colorLoad = color;
	step->render.depthLoad = depth;
	step->render.stencilLoad = stencil;
	step->render.colorStore = VKRRenderPassStoreAction::STORE;
	step->render.depthStore = VKRRenderPassStoreAction::STORE;
	step->render.stencilStore = VKRRenderPassStoreAction::STORE;
	step->render.clearColor = clearColor;
	step->render.clearDepth = clearDepth;
	step->render.clearStencil = clearStencil;
	step->render.numDraws = 0;
	step->render.numReads = 0;
	step->render.finalColorLayout = !fb ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_UNDEFINED;
	step->render.finalDepthStencilLayout = !fb ? VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_UNDEFINED;
	step->tag = tag;
	steps_.push_back(step);

	if (fb) {
		// If there's a KEEP, we naturally read from the framebuffer.
		if (color == VKRRenderPassLoadAction::KEEP || depth == VKRRenderPassLoadAction::KEEP || stencil == VKRRenderPassLoadAction::KEEP) {
			step->dependencies.insert(fb);
		}
	}

	curRenderStep_ = step;
	curStepHasViewport_ = false;
	curStepHasScissor_ = false;
	if (fb) {
		curWidthRaw_ = fb->width;
		curHeightRaw_ = fb->height;
		curWidth_ = fb->width;
		curHeight_ = fb->height;
	} else {
		curWidthRaw_ = vulkan_->GetBackbufferWidth();
		curHeightRaw_ = vulkan_->GetBackbufferHeight();
		if (g_display_rotation == DisplayRotation::ROTATE_90 || g_display_rotation == DisplayRotation::ROTATE_270) {
			curWidth_ = curHeightRaw_;
			curHeight_ = curWidthRaw_;
		} else {
			curWidth_ = curWidthRaw_;
			curHeight_ = curHeightRaw_;
		}
	}

	if (color == VKRRenderPassLoadAction::CLEAR || depth == VKRRenderPassLoadAction::CLEAR || stencil == VKRRenderPassLoadAction::CLEAR) {
		curRenderArea_.SetRect(0, 0, curWidth_, curHeight_);
	}

	// See above - we add a clear afterward if only one side for depth/stencil CLEAR/KEEP.
	if (lateClearMask != 0) {
		VkRenderData data{ VKRRenderCommand::CLEAR };
		data.clear.clearColor = clearColor;
		data.clear.clearZ = clearDepth;
		data.clear.clearStencil = clearStencil;
		data.clear.clearMask = lateClearMask;
		curRenderStep_->commands.push_back(data);
	}

	if (invalidationCallback_) {
		invalidationCallback_(InvalidationCallbackFlags::RENDER_PASS_STATE);
	}
}

bool VulkanRenderManager::CopyFramebufferToMemorySync(VKRFramebuffer *src, VkImageAspectFlags aspectBits, int x, int y, int w, int h, Draw::DataFormat destFormat, uint8_t *pixels, int pixelStride, const char *tag) {
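	// Synchronous readback: queue a READBACK step, run the whole queue with FlushSync(), then copy
	// the result out of the queue runner's readback buffer into the caller's pixels.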
	_dbg_assert_(insideFrame_);
	for (int i = (int)steps_.size() - 1; i >= 0; i--) {
		if (steps_[i]->stepType == VKRStepType::RENDER && steps_[i]->render.framebuffer == src) {
			steps_[i]->render.numReads++;
			break;
		}
	}

	EndCurRenderStep();

	VKRStep *step = new VKRStep{ VKRStepType::READBACK };
	step->readback.aspectMask = aspectBits;
	step->readback.src = src;
	step->readback.srcRect.offset = { x, y };
	step->readback.srcRect.extent = { (uint32_t)w, (uint32_t)h };
	step->dependencies.insert(src);
	step->tag = tag;
	steps_.push_back(step);

	FlushSync();

	Draw::DataFormat srcFormat = Draw::DataFormat::UNDEFINED;
	if (aspectBits & VK_IMAGE_ASPECT_COLOR_BIT) {
		if (src) {
			switch (src->color.format) {
			case VK_FORMAT_R8G8B8A8_UNORM: srcFormat = Draw::DataFormat::R8G8B8A8_UNORM; break;
			default: _assert_(false);
			}
		} else {
			// Backbuffer.
			if (!(vulkan_->GetSurfaceCapabilities().supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)) {
				ERROR_LOG(G3D, "Copying from backbuffer not supported, can't take screenshots");
				return false;
			}
			switch (vulkan_->GetSwapchainFormat()) {
			case VK_FORMAT_B8G8R8A8_UNORM: srcFormat = Draw::DataFormat::B8G8R8A8_UNORM; break;
			case VK_FORMAT_R8G8B8A8_UNORM: srcFormat = Draw::DataFormat::R8G8B8A8_UNORM; break;
			// NOTE: If you add supported formats here, make sure to also support them in VulkanQueueRunner::CopyReadbackBuffer.
			default:
				ERROR_LOG(G3D, "Unsupported backbuffer format for screenshots");
				return false;
			}
		}
	} else if (aspectBits & VK_IMAGE_ASPECT_STENCIL_BIT) {
		// Copies from stencil are always S8.
		srcFormat = Draw::DataFormat::S8;
	} else if (aspectBits & VK_IMAGE_ASPECT_DEPTH_BIT) {
		switch (src->depth.format) {
		case VK_FORMAT_D24_UNORM_S8_UINT: srcFormat = Draw::DataFormat::D24_S8; break;
		case VK_FORMAT_D32_SFLOAT_S8_UINT: srcFormat = Draw::DataFormat::D32F; break;
		case VK_FORMAT_D16_UNORM_S8_UINT: srcFormat = Draw::DataFormat::D16; break;
		default: _assert_(false);
		}
	} else {
		_assert_(false);
	}

	// Need to call this after FlushSync so the pixels are guaranteed to be ready in CPU-accessible VRAM.
	queueRunner_.CopyReadbackBuffer(w, h, srcFormat, destFormat, pixelStride, pixels);
	return true;
}

void VulkanRenderManager::CopyImageToMemorySync(VkImage image, int mipLevel, int x, int y, int w, int h, Draw::DataFormat destFormat, uint8_t *pixels, int pixelStride, const char *tag) {
	_dbg_assert_(insideFrame_);

	EndCurRenderStep();

	VKRStep *step = new VKRStep{ VKRStepType::READBACK_IMAGE };
	step->readback_image.image = image;
	step->readback_image.srcRect.offset = { x, y };
	step->readback_image.srcRect.extent = { (uint32_t)w, (uint32_t)h };
	step->readback_image.mipLevel = mipLevel;
	step->tag = tag;
	steps_.push_back(step);

	FlushSync();

	// Need to call this after FlushSync so the pixels are guaranteed to be ready in CPU-accessible VRAM.
	queueRunner_.CopyReadbackBuffer(w, h, destFormat, destFormat, pixelStride, pixels);
}

static void RemoveDrawCommands(std::vector<VkRenderData> *cmds) {
	// Here we remove any DRAW type commands when we hit a CLEAR.
	for (auto &c : *cmds) {
		if (c.cmd == VKRRenderCommand::DRAW || c.cmd == VKRRenderCommand::DRAW_INDEXED) {
			c.cmd = VKRRenderCommand::REMOVED;
		}
	}
}

static void CleanupRenderCommands(std::vector<VkRenderData> *cmds) {
	size_t lastCommand[(int)VKRRenderCommand::NUM_RENDER_COMMANDS];
	memset(lastCommand, -1, sizeof(lastCommand));
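	// lastCommand[cmd] holds the index of the most recent state command of each type. When the same
	// state type appears again with no draw in between, the earlier one is redundant and gets marked
	// REMOVED; a draw (or any unknown command) acts as a barrier that resets the tracking.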

	// Find any duplicate state commands (likely from RemoveDrawCommands.)
	for (size_t i = 0; i < cmds->size(); ++i) {
		auto &c = cmds->at(i);
		auto &lastOfCmd = lastCommand[(uint8_t)c.cmd];

		switch (c.cmd) {
		case VKRRenderCommand::REMOVED:
			continue;

		case VKRRenderCommand::VIEWPORT:
		case VKRRenderCommand::SCISSOR:
		case VKRRenderCommand::BLEND:
		case VKRRenderCommand::STENCIL:
			if (lastOfCmd != -1) {
				cmds->at(lastOfCmd).cmd = VKRRenderCommand::REMOVED;
			}
			break;

		case VKRRenderCommand::PUSH_CONSTANTS:
			// TODO: For now, we have to keep this one (it has an offset.) Still update lastCommand.
			break;

		case VKRRenderCommand::CLEAR:
			// Ignore, doesn't participate in state.
			continue;

		case VKRRenderCommand::DRAW_INDEXED:
		case VKRRenderCommand::DRAW:
		default:
			// Boundary - must keep state before this.
			memset(lastCommand, -1, sizeof(lastCommand));
			continue;
		}

		lastOfCmd = i;
	}

	// At this point, anything in lastCommand can be cleaned up too.
	// Note that it's safe to remove the last unused PUSH_CONSTANTS here.
	for (size_t i = 0; i < ARRAY_SIZE(lastCommand); ++i) {
		auto &lastOfCmd = lastCommand[i];
		if (lastOfCmd != -1) {
			cmds->at(lastOfCmd).cmd = VKRRenderCommand::REMOVED;
		}
	}
}

2017-08-16 23:03:30 +02:00
|
|
|
void VulkanRenderManager::Clear(uint32_t clearColor, float clearZ, int clearStencil, int clearMask) {
	_dbg_assert_(curRenderStep_ && curRenderStep_->stepType == VKRStepType::RENDER);
	if (!clearMask)
		return;

	// If this is the first drawing command or clears everything, merge it into the pass.
	int allAspects = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
	if (curRenderStep_->render.numDraws == 0 || clearMask == allAspects) {
		curRenderStep_->render.clearColor = clearColor;
		curRenderStep_->render.clearDepth = clearZ;
		curRenderStep_->render.clearStencil = clearStencil;
		curRenderStep_->render.colorLoad = (clearMask & VK_IMAGE_ASPECT_COLOR_BIT) ? VKRRenderPassLoadAction::CLEAR : VKRRenderPassLoadAction::KEEP;
		curRenderStep_->render.depthLoad = (clearMask & VK_IMAGE_ASPECT_DEPTH_BIT) ? VKRRenderPassLoadAction::CLEAR : VKRRenderPassLoadAction::KEEP;
		curRenderStep_->render.stencilLoad = (clearMask & VK_IMAGE_ASPECT_STENCIL_BIT) ? VKRRenderPassLoadAction::CLEAR : VKRRenderPassLoadAction::KEEP;

		if (clearMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
			if (curRenderStep_->render.framebuffer && !curRenderStep_->render.framebuffer->HasDepth()) {
				WARN_LOG(G3D, "Trying to clear depth/stencil on a non-depth framebuffer: %s", curRenderStep_->render.framebuffer->Tag());
			} else {
				curPipelineFlags_ |= PipelineFlags::USES_DEPTH_STENCIL;
			}
		}

		// In case there were commands already.
		curRenderStep_->render.numDraws = 0;
		RemoveDrawCommands(&curRenderStep_->commands);
	} else {
		VkRenderData data{ VKRRenderCommand::CLEAR };
		data.clear.clearColor = clearColor;
		data.clear.clearZ = clearZ;
		data.clear.clearStencil = clearStencil;
		data.clear.clearMask = clearMask;
		curRenderStep_->commands.push_back(data);
	}

	curRenderArea_.SetRect(0, 0, curWidth_, curHeight_);
}
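
// Note on the two paths above: when the clear can be merged, it turns into CLEAR load actions
// on the render pass, which is essentially free (particularly on tiled GPUs). Otherwise a
// CLEAR command is recorded into the step and executed mid-pass by the queue runner,
// presumably via vkCmdClearAttachments.
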
void VulkanRenderManager::CopyFramebuffer(VKRFramebuffer *src, VkRect2D srcRect, VKRFramebuffer *dst, VkOffset2D dstPos, VkImageAspectFlags aspectMask, const char *tag) {
	_dbg_assert_msg_(srcRect.offset.x >= 0, "srcrect offset x (%d) < 0", srcRect.offset.x);
	_dbg_assert_msg_(srcRect.offset.y >= 0, "srcrect offset y (%d) < 0", srcRect.offset.y);
	_dbg_assert_msg_(srcRect.offset.x + srcRect.extent.width <= (uint32_t)src->width, "srcrect offset x (%d) + extent (%d) > width (%d)", srcRect.offset.x, srcRect.extent.width, (uint32_t)src->width);
	_dbg_assert_msg_(srcRect.offset.y + srcRect.extent.height <= (uint32_t)src->height, "srcrect offset y (%d) + extent (%d) > height (%d)", srcRect.offset.y, srcRect.extent.height, (uint32_t)src->height);

	_dbg_assert_msg_(srcRect.extent.width > 0, "copy srcwidth == 0");
	_dbg_assert_msg_(srcRect.extent.height > 0, "copy srcheight == 0");

	_dbg_assert_msg_(dstPos.x >= 0, "dstPos offset x (%d) < 0", dstPos.x);
	_dbg_assert_msg_(dstPos.y >= 0, "dstPos offset y (%d) < 0", dstPos.y);
	_dbg_assert_msg_(dstPos.x + srcRect.extent.width <= (uint32_t)dst->width, "dstPos + extent x > width");
	_dbg_assert_msg_(dstPos.y + srcRect.extent.height <= (uint32_t)dst->height, "dstPos + extent y > height");

	for (int i = (int)steps_.size() - 1; i >= 0; i--) {
		if (steps_[i]->stepType == VKRStepType::RENDER && steps_[i]->render.framebuffer == src) {
			if (aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
				if (steps_[i]->render.finalColorLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalColorLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
				}
			}
			if (aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
				if (steps_[i]->render.finalDepthStencilLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalDepthStencilLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
				}
			}
			steps_[i]->render.numReads++;
			break;
		}
	}

	for (int i = (int)steps_.size() - 1; i >= 0; i--) {
		if (steps_[i]->stepType == VKRStepType::RENDER && steps_[i]->render.framebuffer == dst) {
			if (aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
				if (steps_[i]->render.finalColorLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalColorLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
				}
			}
			if (aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
				if (steps_[i]->render.finalDepthStencilLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalDepthStencilLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
				}
			}
			break;
		}
	}

	EndCurRenderStep();

	VKRStep *step = new VKRStep{ VKRStepType::COPY };

	step->copy.aspectMask = aspectMask;
	step->copy.src = src;
	step->copy.srcRect = srcRect;
	step->copy.dst = dst;
	step->copy.dstPos = dstPos;
	step->dependencies.insert(src);
	step->tag = tag;

	bool fillsDst = dst && srcRect.offset.x == 0 && srcRect.offset.y == 0 && srcRect.extent.width == dst->width && srcRect.extent.height == dst->height;
	if (dstPos.x != 0 || dstPos.y != 0 || !fillsDst)
		step->dependencies.insert(dst);

	steps_.push_back(step);
}
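
// Dependency note: the source framebuffer is always added as a dependency of the copy step,
// but the destination only is when the copy doesn't fully overwrite it - if every pixel of
// dst is going to be replaced, the step doesn't need to be ordered after whatever produced
// dst's previous contents.
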
void VulkanRenderManager::BlitFramebuffer(VKRFramebuffer *src, VkRect2D srcRect, VKRFramebuffer *dst, VkRect2D dstRect, VkImageAspectFlags aspectMask, VkFilter filter, const char *tag) {
	_dbg_assert_msg_(srcRect.offset.x >= 0, "srcrect offset x (%d) < 0", srcRect.offset.x);
	_dbg_assert_msg_(srcRect.offset.y >= 0, "srcrect offset y (%d) < 0", srcRect.offset.y);
	_dbg_assert_msg_(srcRect.offset.x + srcRect.extent.width <= (uint32_t)src->width, "srcrect offset x (%d) + extent (%d) > width (%d)", srcRect.offset.x, srcRect.extent.width, (uint32_t)src->width);
	_dbg_assert_msg_(srcRect.offset.y + srcRect.extent.height <= (uint32_t)src->height, "srcrect offset y (%d) + extent (%d) > height (%d)", srcRect.offset.y, srcRect.extent.height, (uint32_t)src->height);

	_dbg_assert_msg_(srcRect.extent.width > 0, "blit srcwidth == 0");
	_dbg_assert_msg_(srcRect.extent.height > 0, "blit srcheight == 0");

	_dbg_assert_msg_(dstRect.offset.x >= 0, "dstrect offset x < 0");
	_dbg_assert_msg_(dstRect.offset.y >= 0, "dstrect offset y < 0");
	_dbg_assert_msg_(dstRect.offset.x + dstRect.extent.width <= (uint32_t)dst->width, "dstrect offset x + extent > width");
	_dbg_assert_msg_(dstRect.offset.y + dstRect.extent.height <= (uint32_t)dst->height, "dstrect offset y + extent > height");

	_dbg_assert_msg_(dstRect.extent.width > 0, "blit dstwidth == 0");
	_dbg_assert_msg_(dstRect.extent.height > 0, "blit dstheight == 0");

	// TODO: Seem to be missing final layouts here like in Copy...

	for (int i = (int)steps_.size() - 1; i >= 0; i--) {
		if (steps_[i]->stepType == VKRStepType::RENDER && steps_[i]->render.framebuffer == src) {
			steps_[i]->render.numReads++;
			break;
		}
	}

	EndCurRenderStep();

	VKRStep *step = new VKRStep{ VKRStepType::BLIT };

	step->blit.aspectMask = aspectMask;
	step->blit.src = src;
	step->blit.srcRect = srcRect;
	step->blit.dst = dst;
	step->blit.dstRect = dstRect;
	step->blit.filter = filter;
	step->dependencies.insert(src);
	step->tag = tag;

	bool fillsDst = dst && dstRect.offset.x == 0 && dstRect.offset.y == 0 && dstRect.extent.width == dst->width && dstRect.extent.height == dst->height;
	if (!fillsDst)
		step->dependencies.insert(dst);

	steps_.push_back(step);
}
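
// Unlike CopyFramebuffer above, a blit takes a destination rectangle and a VkFilter, so it
// can scale (and filter) on the way - a plain copy can only move a region 1:1 to a new
// position.
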
VkImageView VulkanRenderManager::BindFramebufferAsTexture(VKRFramebuffer *fb, int binding, VkImageAspectFlags aspectBit, int layer) {
	_dbg_assert_(curRenderStep_ != nullptr);

	// We don't support texturing from stencil, neither do we support texturing from depth|stencil together (nonsensical).
	_dbg_assert_(aspectBit == VK_IMAGE_ASPECT_COLOR_BIT || aspectBit == VK_IMAGE_ASPECT_DEPTH_BIT);

	// Mark the dependency, check for required transitions, and return the image.

	// Optimization: If possible, use final*Layout to put the texture into the correct layout "early".
	for (int i = (int)steps_.size() - 1; i >= 0; i--) {
		if (steps_[i]->stepType == VKRStepType::RENDER && steps_[i]->render.framebuffer == fb) {
			if (aspectBit == VK_IMAGE_ASPECT_COLOR_BIT) {
				// If this framebuffer was rendered to earlier in this frame, make sure to pre-transition it to the correct layout.
				if (steps_[i]->render.finalColorLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalColorLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				}
				// If we find some other layout, a copy after this is likely involved. It's fine though,
				// we'll just transition it right as we need it and lose a tiny optimization.
			} else if (aspectBit == VK_IMAGE_ASPECT_DEPTH_BIT) {
				// If this framebuffer was rendered to earlier in this frame, make sure to pre-transition it to the correct layout.
				if (steps_[i]->render.finalDepthStencilLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
					steps_[i]->render.finalDepthStencilLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				}
			}  // We don't (yet?) support texturing from stencil images.
			steps_[i]->render.numReads++;
			break;
		}
	}

	// Track dependencies fully.
	curRenderStep_->dependencies.insert(fb);

	// Add this pretransition unless we already have it.
	TransitionRequest rq{ fb, aspectBit, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL };
	curRenderStep_->preTransitions.insert(rq);  // Note that insert avoids inserting duplicates.

	if (layer == -1) {
		return aspectBit == VK_IMAGE_ASPECT_COLOR_BIT ? fb->color.texAllLayersView : fb->depth.texAllLayersView;
	} else {
		return aspectBit == VK_IMAGE_ASPECT_COLOR_BIT ? fb->color.texLayerViews[layer] : fb->depth.texLayerViews[layer];
	}
}
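
// The returned view depends on the requested layer: layer == -1 yields a view covering all
// layers of the framebuffer (texAllLayersView), while a specific layer index yields the
// corresponding single-layer view. Note that the binding parameter is not used in this
// function; the actual descriptor binding presumably happens elsewhere.
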
// Called on main thread.
// Sends the collected commands to the render thread. Submit-latency should be
// measured from here, probably.
void VulkanRenderManager::Finish() {
	EndCurRenderStep();

	// Let's do just a bit of cleanup on render commands now.
	for (auto &step : steps_) {
		if (step->stepType == VKRStepType::RENDER) {
			CleanupRenderCommands(&step->commands);
		}
	}

	int curFrame = vulkan_->GetCurFrame();
	FrameData &frameData = frameData_[curFrame];

	VLOG("PUSH: Frame[%d]", curFrame);
	VKRRenderThreadTask task;
	task.frame = curFrame;
	task.runType = VKRRunType::PRESENT;
	{
		std::unique_lock<std::mutex> lock(pushMutex_);
		renderThreadQueue_.push(task);
		renderThreadQueue_.back().steps = std::move(steps_);
		pushCondVar_.notify_one();
	}

	steps_.clear();
	vulkan_->EndFrame();
	insideFrame_ = false;
}
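
// The PRESENT task queued above is consumed by Run() on the render thread. FlushSync()
// further down does the same push with a SYNC task, but additionally blocks until the render
// thread signals completion through syncCondVar_.
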
void VulkanRenderManager::Wipe() {
	for (auto step : steps_) {
		delete step;
	}
	steps_.clear();
}
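
// Note: this only deletes the recorded steps themselves; it does not touch any framebuffers
// or other resources the steps may reference.
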
// Called on the render thread.
//
// Can be called again after a VKRRunType::SYNC on the same frame.
void VulkanRenderManager::Run(VKRRenderThreadTask &task) {
	FrameData &frameData = frameData_[task.frame];

	_dbg_assert_(!frameData.hasPresentCommands);
	frameData.SubmitPending(vulkan_, FrameSubmitType::Pending, frameDataShared_);

	if (!frameData.hasMainCommands) {
		// Effectively resets both main and present command buffers, since they both live in this pool.
		// We always record main commands first, so we don't need to reset the present command buffer separately.
		vkResetCommandPool(vulkan_->GetDevice(), frameData.cmdPoolMain, 0);

		VkCommandBufferBeginInfo begin{ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
		begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		VkResult res = vkBeginCommandBuffer(frameData.mainCmd, &begin);
		frameData.hasMainCommands = true;
		_assert_msg_(res == VK_SUCCESS, "vkBeginCommandBuffer failed! result=%s", VulkanResultToString(res));
	}

	queueRunner_.PreprocessSteps(task.steps);
	// Likely during shutdown, happens in headless.
	if (task.steps.empty() && !frameData.hasAcquired)
		frameData.skipSwap = true;
	//queueRunner_.LogSteps(stepsOnThread, false);
	if (IsVREnabled()) {
		int passes = GetVRPassesCount();
		for (int i = 0; i < passes; i++) {
			PreVRFrameRender(i);
			queueRunner_.RunSteps(task.steps, frameData, frameDataShared_, i < passes - 1);
			PostVRFrameRender();
		}
	} else {
		queueRunner_.RunSteps(task.steps, frameData, frameDataShared_);
	}

	switch (task.runType) {
	case VKRRunType::PRESENT:
		frameData.SubmitPending(vulkan_, FrameSubmitType::Present, frameDataShared_);

		if (!frameData.skipSwap) {
			VkResult res = frameData.QueuePresent(vulkan_, frameDataShared_);
			if (res == VK_ERROR_OUT_OF_DATE_KHR) {
				// We clearly didn't get this in vkAcquireNextImageKHR because of the skipSwap check above.
				// Do the increment.
				outOfDateFrames_++;
			} else if (res == VK_SUBOPTIMAL_KHR) {
				outOfDateFrames_++;
			} else if (res != VK_SUCCESS) {
				_assert_msg_(false, "vkQueuePresentKHR failed! result=%s", VulkanResultToString(res));
			} else {
				// Success
				outOfDateFrames_ = 0;
			}
		} else {
			// We only get here if vkAcquireNextImage returned VK_ERROR_OUT_OF_DATE.
			outOfDateFrames_++;
			frameData.skipSwap = false;
		}
		break;

	case VKRRunType::SYNC:
		// The submit will trigger the readbackFence, and also do the wait for it.
		frameData.SubmitPending(vulkan_, FrameSubmitType::Sync, frameDataShared_);

		{
			std::unique_lock<std::mutex> lock(syncMutex_);
			syncCondVar_.notify_one();
		}

		// At this point the GPU is idle. We can resume filling the command buffers for the
		// current frame, since all previously enqueued command buffers have been processed.
		// No need to switch to the next frame number; that would just be confusing.
		break;

	default:
		_dbg_assert_(false);
	}

	VLOG("PULL: Finished running frame %d", task.frame);
}
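
// Presentation error handling above is deliberately soft: VK_ERROR_OUT_OF_DATE_KHR and
// VK_SUBOPTIMAL_KHR just bump outOfDateFrames_ rather than asserting; swapchain recreation
// is presumably triggered elsewhere based on that counter.
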
// Called from main thread.
void VulkanRenderManager::FlushSync() {
	if (invalidationCallback_) {
		invalidationCallback_(InvalidationCallbackFlags::COMMAND_BUFFER_STATE);
	}

	int curFrame = vulkan_->GetCurFrame();
	FrameData &frameData = frameData_[curFrame];

	{
		VLOG("PUSH: Frame[%d]", curFrame);
		VKRRenderThreadTask task;
		task.frame = curFrame;
		task.runType = VKRRunType::SYNC;
		std::unique_lock<std::mutex> lock(pushMutex_);
		renderThreadQueue_.push(task);
		renderThreadQueue_.back().steps = std::move(steps_);
		pushCondVar_.notify_one();
	}

	{
		std::unique_lock<std::mutex> lock(syncMutex_);
		// Wait for the flush to be hit, since we're syncing.
		while (!frameData.syncDone) {
			VLOG("PUSH: Waiting for frame[%d].syncDone = 1 (sync)", curFrame);
			syncCondVar_.wait(lock);
		}
		frameData.syncDone = false;
	}
}
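
// The wait loop above relies on frameData.syncDone being set on the render thread side -
// presumably inside SubmitPending(..., FrameSubmitType::Sync, ...) before syncCondVar_ is
// notified in Run(). Clearing it again here re-arms the handshake for the next FlushSync().
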
void VulkanRenderManager::ResetStats() {
	initTimeMs_.Reset();
	totalGPUTimeMs_.Reset();
	renderCPUTimeMs_.Reset();
}