2021-03-02 20:57:25 -08:00
|
|
|
#include "ppsspp_config.h"
|
2017-11-18 15:42:39 +01:00
|
|
|
#include "GLRenderManager.h"
|
2020-10-04 23:24:14 +02:00
|
|
|
#include "Common/GPU/OpenGL/GLFeatures.h"
|
|
|
|
#include "Common/GPU/thin3d.h"
|
2020-10-01 09:27:25 +02:00
|
|
|
#include "Common/Thread/ThreadUtil.h"
|
2022-08-27 17:33:37 +02:00
|
|
|
#include "Common/VR/PPSSPPVR.h"
|
2020-08-15 15:51:41 +02:00
|
|
|
|
|
|
|
#include "Common/Log.h"
|
2022-11-08 16:56:47 +01:00
|
|
|
#include "Common/TimeUtil.h"
|
2018-03-04 22:21:57 +01:00
|
|
|
#include "Common/MemoryUtil.h"
|
2023-05-17 14:38:11 +02:00
|
|
|
#include "Common/StringUtils.h"
|
2020-11-10 00:12:15 +01:00
|
|
|
#include "Common/Math/math_util.h"
|
2017-11-18 15:42:39 +01:00
|
|
|
|
|
|
|
#if 0 // def _DEBUG
|
2020-08-15 15:51:41 +02:00
|
|
|
#define VLOG(...) INFO_LOG(G3D, __VA_ARGS__)
|
2017-11-18 15:42:39 +01:00
|
|
|
#else
|
|
|
|
#define VLOG(...)
|
|
|
|
#endif
|
|
|
|
|
2023-05-18 22:02:38 +02:00
|
|
|
// Identity of the GL submission (render) thread, set in ThreadStart().
// Used elsewhere to check which thread code is running on.
std::thread::id renderThreadId;
|
2018-04-08 19:00:24 -07:00
|
|
|
|
2022-08-07 11:09:34 +02:00
|
|
|
// Records texture dimensions and whether the texture may use wrapping
// modes: wrapping needs full NPOT support unless both width and height
// happen to be powers of two.
GLRTexture::GLRTexture(const Draw::DeviceCaps &caps, int width, int height, int depth, int numMips) {
	canWrap = caps.textureNPOTFullySupported || (isPowerOf2(width) && isPowerOf2(height));
	w = width;
	h = height;
	d = depth;
	this->numMips = numMips;
}
|
|
|
|
|
|
|
|
// Releases the GL texture object, if one was ever created.
GLRTexture::~GLRTexture() {
	if (texture != 0)
		glDeleteTextures(1, &texture);
}
|
|
|
|
|
2023-05-10 00:20:43 +02:00
|
|
|
GLRenderManager::GLRenderManager() {
	// Size check kept around for debugging: GLRRenderData is allocated in
	// bulk per frame, so it's useful to notice when it grows.
	// size_t sz = sizeof(GLRRenderData);
	// _dbg_assert_(sz == 88);
}
|
|
|
|
|
2017-11-18 15:42:39 +01:00
|
|
|
GLRenderManager::~GLRenderManager() {
	// The render thread must have been stopped first (see StopThread()).
	_dbg_assert_(!run_);

	// ThreadEnd() is expected to have drained all per-frame deleters.
	for (int i = 0; i < MAX_INFLIGHT_FRAMES; i++) {
		_assert_(frameData_[i].deleter.IsEmpty());
		_assert_(frameData_[i].deleter_prev.IsEmpty());
	}
	// Was anything deleted during shutdown?
	deleter_.Perform(this, skipGLCalls_);
	_assert_(deleter_.IsEmpty());
}
|
|
|
|
|
2018-12-23 12:46:48 -08:00
|
|
|
// Called on the render thread before it starts pulling tasks. Creates the
// device objects and picks the buffer-mapping strategy appropriate for
// this GPU/driver combination.
void GLRenderManager::ThreadStart(Draw::DrawContext *draw) {
	queueRunner_.CreateDeviceObjects();
	renderThreadId = std::this_thread::get_id();

	// Apply a pending change to the inflight frame count, if one was
	// requested before the thread started.
	if (newInflightFrames_ != -1) {
		INFO_LOG(G3D, "Updating inflight frames to %d", newInflightFrames_);
		inflightFrames_ = newInflightFrames_;
		newInflightFrames_ = -1;
	}

	// Don't save draw, we don't want any thread safety confusion.
	bool mapBuffers = draw->GetBugs().Has(Draw::Bugs::ANY_MAP_BUFFER_RANGE_SLOW);
	bool hasBufferStorage = gl_extensions.ARB_buffer_storage || gl_extensions.EXT_buffer_storage;
	// On old GLES without buffer storage, mapping wouldn't work anyway.
	if (!gl_extensions.VersionGEThan(3, 0, 0) && gl_extensions.IsGLES && !hasBufferStorage) {
		// Force disable if it wouldn't work anyway.
		mapBuffers = false;
	}

	// Notes on buffer mapping:
	// NVIDIA GTX 9xx / 2017-10 drivers - mapping improves speed, basic unmap seems best.
	// PowerVR GX6xxx / iOS 10.3 - mapping has little improvement, explicit flush is slower.
	if (mapBuffers) {
		switch (gl_extensions.gpuVendor) {
		case GPU_VENDOR_NVIDIA:
			bufferStrategy_ = GLBufferStrategy::FRAME_UNMAP;
			break;

		// Temporarily disabled because it doesn't work with task switching on Android.
		// The mapped buffer seems to just be pulled out like a rug from under us, crashing
		// as soon as any write happens, which can happen during shutdown since we write from the
		// Emu thread which may not yet have shut down. There may be solutions to this, but for now,
		// disable this strategy to avoid crashing.
		//case GPU_VENDOR_QUALCOMM:
		//	bufferStrategy_ = GLBufferStrategy::FLUSH_INVALIDATE_UNMAP;
		//	break;

		default:
			bufferStrategy_ = GLBufferStrategy::SUBDATA;
		}
	} else {
		bufferStrategy_ = GLBufferStrategy::SUBDATA;
	}
}
|
|
|
|
|
|
|
|
// Called on the render thread after the last task has run. Destroys device
// objects and drains every pending deleter so no GL objects leak.
void GLRenderManager::ThreadEnd() {
	INFO_LOG(G3D, "ThreadEnd");

	queueRunner_.DestroyDeviceObjects();
	VLOG(" PULL: Quitting");

	// Good time to run all the deleters to get rid of leftover objects.
	for (int i = 0; i < MAX_INFLIGHT_FRAMES; i++) {
		// Since we're in shutdown, we should skip the GL calls on Android.
		frameData_[i].deleter.Perform(this, skipGLCalls_);
		frameData_[i].deleter_prev.Perform(this, skipGLCalls_);
	}
	deleter_.Perform(this, skipGLCalls_);
	// Any queued steps that never got to run are simply discarded.
	for (int i = 0; i < (int)steps_.size(); i++) {
		delete steps_[i];
	}
	steps_.clear();
	initSteps_.clear();
}
|
|
|
|
|
2022-10-03 15:56:30 +02:00
|
|
|
// Unlike in Vulkan, this isn't a full independent function, instead it gets called every frame.
|
2022-11-08 16:56:47 +01:00
|
|
|
//
|
|
|
|
// This means that we have to block and run the render queue until we've presented one frame,
|
|
|
|
// at which point we can leave.
|
|
|
|
//
|
|
|
|
// NOTE: If run_ is true, we WILL run a task!
|
2018-01-16 18:13:31 +01:00
|
|
|
bool GLRenderManager::ThreadFrame() {
|
2022-10-03 16:35:08 +02:00
|
|
|
if (!run_) {
|
2018-01-19 21:48:38 -08:00
|
|
|
return false;
|
2022-10-03 16:35:08 +02:00
|
|
|
}
|
2018-01-20 16:44:36 -08:00
|
|
|
|
2023-05-16 23:14:54 +02:00
|
|
|
GLRRenderThreadTask *task = nullptr;
|
2022-10-03 15:56:30 +02:00
|
|
|
|
2018-01-20 16:44:36 -08:00
|
|
|
// In case of syncs or other partial completion, we keep going until we complete a frame.
|
2022-10-03 15:56:30 +02:00
|
|
|
while (true) {
|
|
|
|
// Pop a task of the queue and execute it.
|
2022-11-08 16:56:47 +01:00
|
|
|
// NOTE: We need to actually wait for a task, we can't just bail!
|
2018-01-20 16:44:36 -08:00
|
|
|
{
|
2022-10-03 15:56:30 +02:00
|
|
|
std::unique_lock<std::mutex> lock(pushMutex_);
|
2022-11-08 16:56:47 +01:00
|
|
|
while (renderThreadQueue_.empty()) {
|
|
|
|
pushCondVar_.wait(lock);
|
2018-01-20 16:44:36 -08:00
|
|
|
}
|
2023-05-16 23:29:41 +02:00
|
|
|
task = std::move(renderThreadQueue_.front());
|
2022-10-03 15:56:30 +02:00
|
|
|
renderThreadQueue_.pop();
|
2017-11-18 15:42:39 +01:00
|
|
|
}
|
2022-10-03 15:56:30 +02:00
|
|
|
|
|
|
|
// We got a task! We can now have pushMutex_ unlocked, allowing the host to
|
|
|
|
// push more work when it feels like it, and just start working.
|
2023-05-16 23:14:54 +02:00
|
|
|
if (task->runType == GLRRunType::EXIT) {
|
2022-10-03 15:56:30 +02:00
|
|
|
// Oh, host wanted out. Let's leave, and also let's notify the host.
|
|
|
|
// This is unlike Vulkan too which can just block on the thread existing.
|
|
|
|
std::unique_lock<std::mutex> lock(syncMutex_);
|
|
|
|
syncCondVar_.notify_one();
|
|
|
|
syncDone_ = true;
|
|
|
|
break;
|
2018-01-16 18:13:31 +01:00
|
|
|
}
|
2022-09-14 23:23:28 +02:00
|
|
|
|
|
|
|
// Render the scene.
|
2023-05-16 23:14:54 +02:00
|
|
|
VLOG(" PULL: Frame %d RUN (%0.3f)", task->frame, time_now_d());
|
|
|
|
if (Run(*task)) {
|
2022-11-08 16:56:47 +01:00
|
|
|
// Swap requested, so we just bail the loop.
|
2023-05-16 23:14:54 +02:00
|
|
|
delete task;
|
2022-11-08 16:56:47 +01:00
|
|
|
break;
|
|
|
|
}
|
2023-05-16 23:14:54 +02:00
|
|
|
delete task;
|
2022-10-03 15:56:30 +02:00
|
|
|
};
|
2022-08-19 16:59:39 +02:00
|
|
|
|
2022-11-08 16:56:47 +01:00
|
|
|
return true;
|
2017-11-18 15:42:39 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void GLRenderManager::StopThread() {
|
2022-10-03 15:56:30 +02:00
|
|
|
// There's not really a lot to do here anymore.
|
2022-11-08 16:56:47 +01:00
|
|
|
INFO_LOG(G3D, "GLRenderManager::StopThread()");
|
2018-02-08 00:23:48 +01:00
|
|
|
if (run_) {
|
2017-11-18 15:42:39 +01:00
|
|
|
run_ = false;
|
2022-11-08 16:56:47 +01:00
|
|
|
|
|
|
|
std::unique_lock<std::mutex> lock(pushMutex_);
|
2023-05-16 23:14:54 +02:00
|
|
|
renderThreadQueue_.push(new GLRRenderThreadTask(GLRRunType::EXIT));
|
2022-11-08 21:59:08 +01:00
|
|
|
pushCondVar_.notify_one();
|
2017-11-18 15:42:39 +01:00
|
|
|
} else {
|
2022-10-03 16:35:08 +02:00
|
|
|
WARN_LOG(G3D, "GL submission thread was already paused.");
|
2017-11-18 15:42:39 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-05-17 14:38:11 +02:00
|
|
|
std::string GLRenderManager::GetGpuProfileString() const {
|
|
|
|
int curFrame = GetCurFrame();
|
|
|
|
const GLQueueProfileContext &profile = frameData_[curFrame].profile;
|
|
|
|
|
|
|
|
float cputime_ms = 1000.0f * (profile.cpuEndTime - profile.cpuStartTime);
|
2023-05-24 14:08:19 +02:00
|
|
|
return StringFromFormat("CPU time to run the list: %0.2f ms\n\n%s", cputime_ms, profilePassesString_.c_str());
|
2023-05-17 14:38:11 +02:00
|
|
|
}
|
|
|
|
|
2020-05-21 11:24:05 +02:00
|
|
|
// Begins (or re-uses) a render step targeting the given framebuffer
// (nullptr = backbuffer). The pass actions decide whether color/depth/
// stencil get cleared on entry; clears are queued as a CLEAR command at
// the start of the step.
void GLRenderManager::BindFramebufferAsRenderTarget(GLRFramebuffer *fb, GLRRenderPassAction color, GLRRenderPassAction depth, GLRRenderPassAction stencil, uint32_t clearColor, float clearDepth, uint8_t clearStencil, const char *tag) {
	_assert_(insideFrame_);
#ifdef _DEBUG
	curProgram_ = nullptr;
#endif

	// Eliminate dupes.
	if (steps_.size() && steps_.back()->stepType == GLRStepType::RENDER && steps_.back()->render.framebuffer == fb) {
		if (color != GLRRenderPassAction::CLEAR && depth != GLRRenderPassAction::CLEAR && stencil != GLRRenderPassAction::CLEAR) {
			// We don't move to a new step, this bind was unnecessary and we can safely skip it.
			curRenderStep_ = steps_.back();
			return;
		}
	}
	if (curRenderStep_ && curRenderStep_->commands.size() == 0) {
		VLOG("Empty render step. Usually happens after uploading pixels.");
	}

	GLRStep *step = new GLRStep{ GLRStepType::RENDER };
	// This is what queues up new passes, and can end previous ones.
	step->render.framebuffer = fb;
	step->render.color = color;
	step->render.depth = depth;
	step->render.stencil = stencil;
	step->tag = tag;
	steps_.push_back(step);

	// Build the combined clear command for whatever aspects requested CLEAR.
	GLuint clearMask = 0;
	GLRRenderData data(GLRRenderCommand::CLEAR);
	if (color == GLRRenderPassAction::CLEAR) {
		clearMask |= GL_COLOR_BUFFER_BIT;
		data.clear.clearColor = clearColor;
	}
	if (depth == GLRRenderPassAction::CLEAR) {
		clearMask |= GL_DEPTH_BUFFER_BIT;
		data.clear.clearZ = clearDepth;
	}
	if (stencil == GLRRenderPassAction::CLEAR) {
		clearMask |= GL_STENCIL_BUFFER_BIT;
		data.clear.clearStencil = clearStencil;
	}
	if (clearMask) {
		// Zeroed scissor rect - presumably interpreted by the queue runner
		// as "no scissor" for this clear; confirm in GLQueueRunner.
		data.clear.scissorX = 0;
		data.clear.scissorY = 0;
		data.clear.scissorW = 0;
		data.clear.scissorH = 0;
		data.clear.clearMask = clearMask;
		data.clear.colorMask = 0xF;
		step->commands.push_back(data);
	}
	curRenderStep_ = step;

	// If any aspect is KEEP, the pass reads the framebuffer's previous
	// contents, so record a dependency on it.
	if (fb) {
		if (color == GLRRenderPassAction::KEEP || depth == GLRRenderPassAction::KEEP || stencil == GLRRenderPassAction::KEEP) {
			step->dependencies.insert(fb);
		}
	}

	if (invalidationCallback_) {
		invalidationCallback_(InvalidationCallbackFlags::RENDER_PASS_STATE);
	}
}
|
|
|
|
|
2022-10-20 10:15:19 +02:00
|
|
|
// Queues a command that binds a framebuffer's contents as a texture in the
// given slot, and records the read dependency on that framebuffer.
void GLRenderManager::BindFramebufferAsTexture(GLRFramebuffer *fb, int binding, int aspectBit) {
	_dbg_assert_(curRenderStep_ && curRenderStep_->stepType == GLRStepType::RENDER);
	_dbg_assert_(binding < MAX_GL_TEXTURE_SLOTS);

	GLRRenderData cmd{ GLRRenderCommand::BIND_FB_TEXTURE };
	cmd.bind_fb_texture.framebuffer = fb;
	cmd.bind_fb_texture.slot = binding;
	cmd.bind_fb_texture.aspect = aspectBit;

	GLRStep *step = curRenderStep_;
	step->commands.push_back(cmd);
	step->dependencies.insert(fb);
}
|
|
|
|
|
2020-05-21 11:24:05 +02:00
|
|
|
// Queues a framebuffer-to-framebuffer copy step. The destination is only
// recorded as a dependency when the copy doesn't overwrite it completely
// (partial copies need the previous contents intact).
void GLRenderManager::CopyFramebuffer(GLRFramebuffer *src, GLRect2D srcRect, GLRFramebuffer *dst, GLOffset2D dstPos, int aspectMask, const char *tag) {
	GLRStep *copyStep = new GLRStep{ GLRStepType::COPY };
	copyStep->copy.src = src;
	copyStep->copy.dst = dst;
	copyStep->copy.srcRect = srcRect;
	copyStep->copy.dstPos = dstPos;
	copyStep->copy.aspectMask = aspectMask;
	copyStep->tag = tag;
	copyStep->dependencies.insert(src);

	const bool coversWholeDst = dst && srcRect.x == 0 && srcRect.y == 0 && srcRect.w == dst->width && srcRect.h == dst->height;
	if (dstPos.x != 0 || dstPos.y != 0 || !coversWholeDst) {
		copyStep->dependencies.insert(dst);
	}
	steps_.push_back(copyStep);
}
|
|
|
|
|
2020-05-21 11:24:05 +02:00
|
|
|
// Queues a blit (possibly scaled copy) step between framebuffers. As with
// CopyFramebuffer, the destination only becomes a dependency when the blit
// doesn't fully replace its contents.
void GLRenderManager::BlitFramebuffer(GLRFramebuffer *src, GLRect2D srcRect, GLRFramebuffer *dst, GLRect2D dstRect, int aspectMask, bool filter, const char *tag) {
	GLRStep *blitStep = new GLRStep{ GLRStepType::BLIT };
	blitStep->blit.src = src;
	blitStep->blit.dst = dst;
	blitStep->blit.srcRect = srcRect;
	blitStep->blit.dstRect = dstRect;
	blitStep->blit.aspectMask = aspectMask;
	blitStep->blit.filter = filter;
	blitStep->tag = tag;
	blitStep->dependencies.insert(src);

	const bool coversWholeDst = dst && dstRect.x == 0 && dstRect.y == 0 && dstRect.w == dst->width && dstRect.h == dst->height;
	if (!coversWholeDst) {
		blitStep->dependencies.insert(dst);
	}
	steps_.push_back(blitStep);
}
|
|
|
|
|
2023-02-05 10:52:52 +01:00
|
|
|
// Synchronously reads back pixels from a framebuffer into CPU memory.
// Queues a READBACK step, flushes and blocks until the render thread has
// executed it, then converts from the readback buffer into 'pixels'.
// Returns false if aspectBits contains no recognized aspect.
// NOTE(review): the 'mode' parameter is not used in this function.
bool GLRenderManager::CopyFramebufferToMemory(GLRFramebuffer *src, int aspectBits, int x, int y, int w, int h, Draw::DataFormat destFormat, uint8_t *pixels, int pixelStride, Draw::ReadbackMode mode, const char *tag) {
	_assert_(pixels);

	GLRStep *step = new GLRStep{ GLRStepType::READBACK };
	step->readback.src = src;
	step->readback.srcRect = { x, y, w, h };
	step->readback.aspectMask = aspectBits;
	step->readback.dstFormat = destFormat;
	step->dependencies.insert(src);
	step->tag = tag;
	steps_.push_back(step);

	curRenderStep_ = nullptr;
	// Blocks until the render thread has performed the readback.
	FlushSync();

	// Determine the format the data arrived in, based on the aspect read.
	Draw::DataFormat srcFormat;
	if (aspectBits & GL_COLOR_BUFFER_BIT) {
		srcFormat = Draw::DataFormat::R8G8B8A8_UNORM;
	} else if (aspectBits & GL_STENCIL_BUFFER_BIT) {
		// Copies from stencil are always S8.
		srcFormat = Draw::DataFormat::S8;
	} else if (aspectBits & GL_DEPTH_BUFFER_BIT) {
		// TODO: Do this properly.
		srcFormat = Draw::DataFormat::D24_S8;
	} else {
		return false;
	}
	queueRunner_.CopyFromReadbackBuffer(src, w, h, srcFormat, destFormat, pixelStride, pixels);
	return true;
}
|
|
|
|
|
2020-05-21 11:24:05 +02:00
|
|
|
// Synchronously reads back one mip level of a texture into CPU memory.
// Like CopyFramebufferToMemory, this stalls the calling thread until the
// render thread has executed the readback.
void GLRenderManager::CopyImageToMemorySync(GLRTexture *texture, int mipLevel, int x, int y, int w, int h, Draw::DataFormat destFormat, uint8_t *pixels, int pixelStride, const char *tag) {
	_assert_(texture);
	_assert_(pixels);
	GLRStep *step = new GLRStep{ GLRStepType::READBACK_IMAGE };
	step->readback_image.texture = texture;
	step->readback_image.mipLevel = mipLevel;
	step->readback_image.srcRect = { x, y, w, h };
	step->tag = tag;
	steps_.push_back(step);

	curRenderStep_ = nullptr;
	// Blocks until the render thread has performed the readback.
	FlushSync();

	// Image readbacks arrive as RGBA8888; conversion to destFormat happens here.
	queueRunner_.CopyFromReadbackBuffer(nullptr, w, h, Draw::DataFormat::R8G8B8A8_UNORM, destFormat, pixelStride, pixels);
}
|
|
|
|
|
2023-05-17 14:38:11 +02:00
|
|
|
// Called on the host/emu thread at the start of a frame. Blocks until the
// render thread has released this frame slot (fence), then marks us as
// inside a frame.
void GLRenderManager::BeginFrame(bool enableProfiling) {
#ifdef _DEBUG
	curProgram_ = nullptr;
#endif

	int curFrame = GetCurFrame();

	GLFrameData &frameData = frameData_[curFrame];
	frameData.profile.enabled = enableProfiling;

	{
		VLOG("PUSH: BeginFrame (curFrame = %d, readyForFence = %d, time=%0.3f)", curFrame, (int)frameData.readyForFence, time_now_d());
		std::unique_lock<std::mutex> lock(frameData.fenceMutex);
		// Wait until Run() signals it's done with this slot (readyForFence),
		// then consume the signal.
		while (!frameData.readyForFence) {
			frameData.fenceCondVar.wait(lock);
		}
		frameData.readyForFence = false;
	}

	if (!run_) {
		WARN_LOG(G3D, "BeginFrame while !run_!");
	}

	insideFrame_ = true;
}
|
|
|
|
|
|
|
|
// Called on the host/emu thread at the end of a frame. Hands the
// accumulated init/render steps to the render thread as a PRESENT task,
// collects profiling output from the previous run of this slot, and
// advances to the next inflight frame.
void GLRenderManager::Finish() {
	curRenderStep_ = nullptr;  // EndCurRenderStep is this simple here.

	int curFrame = GetCurFrame();
	GLFrameData &frameData = frameData_[curFrame];

	// Pending deletes are deferred to this frame's deleter so they run once
	// the render thread has finished with the frame.
	frameData_[curFrame].deleter.Take(deleter_);

	VLOG("PUSH: Finish, pushing task. curFrame = %d", curFrame);
	GLRRenderThreadTask *task = new GLRRenderThreadTask(GLRRunType::PRESENT);
	task->frame = curFrame;

	{
		std::unique_lock<std::mutex> lock(pushMutex_);
		renderThreadQueue_.push(task);
		renderThreadQueue_.back()->initSteps = std::move(initSteps_);
		renderThreadQueue_.back()->steps = std::move(steps_);
		initSteps_.clear();
		steps_.clear();
		pushCondVar_.notify_one();
	}

	// Grab the profiling text produced the last time this slot ran.
	if (frameData.profile.enabled) {
		profilePassesString_ = std::move(frameData.profile.passesString);

#ifdef _DEBUG
		// In debug builds, prepend per-command-type execution counts.
		std::string cmdString;
		for (int i = 0; i < ARRAY_SIZE(frameData.profile.commandCounts); i++) {
			if (frameData.profile.commandCounts[i] > 0) {
				cmdString += StringFromFormat("%s: %d\n", RenderCommandToString((GLRRenderCommand)i), frameData.profile.commandCounts[i]);
			}
		}
		memset(frameData.profile.commandCounts, 0, sizeof(frameData.profile.commandCounts));
		profilePassesString_ = cmdString + profilePassesString_;
#endif

		frameData.profile.passesString.clear();
	}

	// Advance to the next inflight frame slot (wraps around).
	curFrame_++;
	if (curFrame_ >= inflightFrames_)
		curFrame_ = 0;

	insideFrame_ = false;
}
|
|
|
|
|
2022-11-08 16:56:47 +01:00
|
|
|
// Render thread. Returns true if the caller should handle a swap.
//
// Executes one queued task: runs init steps, flushes push buffers, runs
// the render steps (possibly once per VR pass), then performs the
// PRESENT/SYNC completion protocol.
bool GLRenderManager::Run(GLRRenderThreadTask &task) {
	GLFrameData &frameData = frameData_[task.frame];

	if (!frameData.hasBegun) {
		frameData.hasBegun = true;

		// Run the deletes queued the previous time this slot was used, then
		// rotate the current deleter into the "prev" position.
		frameData.deleter_prev.Perform(this, skipGLCalls_);
		frameData.deleter_prev.Take(frameData.deleter);
	}

	// queueRunner_.LogSteps(stepsOnThread);
	queueRunner_.RunInitSteps(task.initSteps, skipGLCalls_);

	// Run this after RunInitSteps so any fresh GLRBuffers for the pushbuffers can get created.
	if (!skipGLCalls_) {
		for (auto iter : frameData.activePushBuffers) {
			iter->Flush();
			iter->UnmapDevice();
		}
	}

	if (frameData.profile.enabled) {
		frameData.profile.cpuStartTime = time_now_d();
	}

	if (IsVREnabled()) {
		// In VR, run the step list once per pass (presumably one per eye -
		// see PPSSPPVR for the pass semantics).
		int passes = GetVRPassesCount();
		for (int i = 0; i < passes; i++) {
			PreVRFrameRender(i);
			queueRunner_.RunSteps(task.steps, frameData, skipGLCalls_, i < passes - 1, true);
			PostVRFrameRender();
		}
	} else {
		queueRunner_.RunSteps(task.steps, frameData, skipGLCalls_, false, false);
	}

	if (frameData.profile.enabled) {
		frameData.profile.cpuEndTime = time_now_d();
	}

	// Re-map the push buffers for the host thread's next frame of writes.
	if (!skipGLCalls_) {
		for (auto iter : frameData.activePushBuffers) {
			iter->MapDevice(bufferStrategy_);
		}
	}

	bool swapRequest = false;

	switch (task.runType) {
	case GLRRunType::PRESENT:
		if (!frameData.skipSwap) {
			// Apply a pending swap interval change before swapping.
			if (swapIntervalChanged_) {
				swapIntervalChanged_ = false;
				if (swapIntervalFunction_) {
					swapIntervalFunction_(swapInterval_);
				}
			}
			// This is the swapchain framebuffer flip.
			if (swapFunction_) {
				VLOG(" PULL: SwapFunction()");
				swapFunction_();
				if (!retainControl_) {
					// get out of here.
					swapRequest = true;
				}
			} else {
				VLOG(" PULL: SwapRequested");
				swapRequest = true;
			}
		} else {
			frameData.skipSwap = false;
		}
		frameData.hasBegun = false;

		VLOG(" PULL: Frame %d.readyForFence = true", task.frame);

		// Release the frame slot so BeginFrame() can proceed on the host thread.
		{
			std::lock_guard<std::mutex> lock(frameData.fenceMutex);
			frameData.readyForFence = true;
			frameData.fenceCondVar.notify_one();
			// At this point, we're done with this framedata (for now).
		}

		break;

	case GLRRunType::SYNC:
		frameData.hasBegun = false;

		// glFinish is not actually necessary here, and won't be unless we start using
		// glBufferStorage. Then we need to use fences.
		{
			// Wake up FlushSync() waiting on the host thread.
			std::unique_lock<std::mutex> lock(syncMutex_);
			syncDone_ = true;
			syncCondVar_.notify_one();
		}
		break;

	default:
		_assert_(false);
	}
	VLOG(" PULL: ::Run(): Done running tasks");
	return swapRequest;
}
|
|
|
|
|
2017-12-19 14:35:24 +01:00
|
|
|
// Pushes all pending steps to the render thread as a SYNC task and blocks
// until the render thread has executed them (see the SYNC case in Run()).
// Used by the synchronous readback functions.
void GLRenderManager::FlushSync() {
	{
		VLOG("PUSH: Frame[%d].readyForRun = true (sync)", curFrame_);

		GLRRenderThreadTask *task = new GLRRenderThreadTask(GLRRunType::SYNC);
		task->frame = curFrame_;

		std::unique_lock<std::mutex> lock(pushMutex_);
		renderThreadQueue_.push(task);
		renderThreadQueue_.back()->initSteps = std::move(initSteps_);
		renderThreadQueue_.back()->steps = std::move(steps_);
		pushCondVar_.notify_one();
		steps_.clear();
	}

	{
		std::unique_lock<std::mutex> lock(syncMutex_);
		// Wait for the flush to be hit, since we're syncing.
		while (!syncDone_) {
			VLOG("PUSH: Waiting for frame[%d].readyForFence = 1 (sync)", curFrame_);
			syncCondVar_.wait(lock);
		}
		// Consume the signal for the next sync.
		syncDone_ = false;
	}
}
|