Taking Screenshots in OpenGL with Off-Screen Rendering


Background

I have a Duilib project into which I have already embedded an OpenGL window, and it can load and display OBJ files. Here is the result:
[Screenshot: the OBJ model rendered in the OpenGL window embedded in the Duilib project]

Third-Party Libraries

  1. glfw-3.3.8 (creates the OpenGL window)
  2. glad-0.1.33 (loads the OpenGL function pointers)
  3. glm-0.9.9.8 (math library)
  4. tinyobjloader-2.0.0rc13 (loads OBJ files)

Notes

  1. The Duilib project runs on the main thread. The OpenGL window is embedded into it but needs a thread of its own, so rendering has to run on a child thread.
  2. My project still needs to control showing and hiding the OpenGL window. One concrete requirement is taking a screenshot of the rendered content while the OpenGL window is hidden, which is what leads to off-screen rendering.

References

LearnOpenGL: an excellent resource for learning OpenGL; if you work through it carefully, it should cover most OpenGL needs.
ChatGPT: once you have the basic concepts down, AI can greatly speed up coding.


1. Off-Screen Rendering

First, read the LearnOpenGL "Framebuffers" chapter to pick up the basics.

(1) Framebuffers and Framebuffer Objects (FBOs)

In short, a framebuffer is the set of buffers a frame is rendered into; you can think of it much like the back buffer in a double-buffering scheme.
A framebuffer object is an OpenGL object; just like a VAO or VBO, it is essentially nothing more than a name (an ID).
You can think of an FBO as a CDC object in MFC: once it is bound, you can draw whatever you like into it. What you do with the result afterwards, show it on screen or save it as a screenshot, is up to you.
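
The basic lifecycle of an FBO is only a few calls; a minimal sketch (the full version used in this project is in createFBO() below):

GLuint fbo = 0;
glGenFramebuffers(1, &fbo);             // create the object: nothing more than an ID
glBindFramebuffer(GL_FRAMEBUFFER, fbo); // from now on, draw calls render into this FBO
// ... attach color/depth textures, render the scene, read the pixels back ...
glBindFramebuffer(GL_FRAMEBUFFER, 0);   // switch back to the default (on-screen) framebuffer
glDeleteFramebuffers(1, &fbo);          // release it when no longer needed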

(2) Attachments

Just like a CDC, an FBO by itself is only an identifier; to actually draw anything, you still need to attach a "canvas" (the Bitmap in the CDC analogy). In an FBO, however, the canvas comes in different types for different content, such as color attachments and depth attachments. The type of each texture is specified through glTexImage2D().
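
For example, the internal format passed to glTexImage2D() is what makes one texture a color canvas and another a depth canvas; these two calls reappear in createFBO() below:

// Color attachment: stores RGBA pixels
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
// Depth attachment: stores per-pixel depth values
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, width, height, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);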

2. The Code

The overall approach is:

  1. Whenever the window size changes, (re)create the FBO and its attachments.
  2. When the main thread wants a screenshot, it sets m_bStartCapture = true; to tell the render thread to draw into the FBO (see the member sketch right after this list);
  3. Wait for the draw to finish, then flip the pixels vertically as needed (OpenGL's origin is the bottom-left corner, while a bitmap's origin is the top-left corner).
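
The member variables that implement this cross-thread handshake are not shown in the snippets below; a plausible set of declarations inside MyOpenGLWnd (the definitions in the real project may differ) would be:

// Hypothetical member declarations, matching the names used in the code below.
// Both flags are written by the main thread and read by the render thread,
// so they should be std::atomic<bool> (requires <atomic>) to avoid data races.
std::atomic<bool>          m_bStartCapture{ false };   // main thread requests a capture
std::atomic<bool>          m_bForceRendering{ false }; // render even while the window is hidden
std::vector<unsigned char> m_vCaptureDatas;            // RGBA pixels read back from the FBO
int                        m_capImageChannel = 4;      // GL_RGBA -> 4 bytes per pixel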

(1) Creating the OpenGL Window on the Main Thread

bool MyOpenGLWnd::CreateOpenGLWnd(HWND hWnd)
{
	// 1. Init GLFW library
	glfwInit();
	glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
	glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
	glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

	// 2. Create GLFW window
	m_pGlfwWnd = glfwCreateWindow(400, 400, "OpenGL", NULL, NULL);
	if (m_pGlfwWnd == NULL)
	{
		m_pLogger->error("Failed to create GLFW window");
		glfwTerminate();
		return false;
	}

	// 3. Retrieve the handle of the glfw window and embed it into the parent window
	HWND hwndGLFW = glfwGetWin32Window(m_pGlfwWnd);
	SetWindowLongW(hwndGLFW, GWL_STYLE, WS_VISIBLE);
	MoveWindow(hwndGLFW, 0, 0, 0, 0, TRUE);
	SetParent(hwndGLFW, hWnd);

	// 4. Set the GLFW window context as the main context
	glfwMakeContextCurrent(m_pGlfwWnd);

	// 5. Register the GLAD function address (context must be set for it to take effect)
	if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
	{
		m_pLogger->error("Failed to initialize GLAD");
		glfwTerminate();
		return false;
	}

	// 6. Enable depth test
	glEnable(GL_DEPTH_TEST);

	// 7. Create shader object
	m_pShader = new Shader;
	std::string error;
	if (!m_pShader->Load("./shader/Shader.vs", "./shader/Shader.fs", error))
	{
		m_pLogger->error(error);
		glfwTerminate();
		return false;
	}

	// 8. Create camera object
	m_pCamera = new Camera(glm::vec3(0.0f, 0.0f, 3.0f));

	// 9. Reset the context because the render procedure will be run in the child thread
	glfwMakeContextCurrent(NULL);

	// 10. Create render thread
	createRenderThread();

	return true;
}
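
One detail worth noting: glfwGetWin32Window() belongs to GLFW's native access API, so the source file that calls it needs the native header in addition to the usual ones, roughly in this order:

#include <glad/glad.h>            // include glad before any header that pulls in OpenGL
#include <GLFW/glfw3.h>
#define GLFW_EXPOSE_NATIVE_WIN32  // expose the Win32-specific functions
#include <GLFW/glfw3native.h>     // declares glfwGetWin32Window()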

(2) The Render Thread

void MyOpenGLWnd::createRenderThread()
{
	m_pRenderThread = new std::thread([this]() {
		// 1. Make the GLFW context current on this render thread
		glfwMakeContextCurrent(m_pGlfwWnd);

		// 2. Register the callback function we need
		glfwSetFramebufferSizeCallback(m_pGlfwWnd, [](GLFWwindow* window, int width, int height) {
			auto pThisClass = static_cast<MyOpenGLWnd*>(glfwGetWindowUserPointer(window));
			if (pThisClass) {
				pThisClass->frameSizeCallback(window, width, height);
			}
		});

		glfwSetMouseButtonCallback(m_pGlfwWnd, [](GLFWwindow* window, int button, int action, int mods) {
			auto pThisClass = static_cast<MyOpenGLWnd*>(glfwGetWindowUserPointer(window));
			if (pThisClass) {
				pThisClass->mouseBtnCallback(window, button, action, mods);
			}
		});

		glfwSetCursorPosCallback(m_pGlfwWnd, [](GLFWwindow* window, double xposIn, double yposIn) {
			auto pThisClass = static_cast<MyOpenGLWnd*>(glfwGetWindowUserPointer(window));
			if (pThisClass) {
				pThisClass->mousePosCallback(window, xposIn, yposIn);
			}
		});

		glfwSetScrollCallback(m_pGlfwWnd, [](GLFWwindow* window, double xoffset, double yoffset) {
			auto pThisClass = static_cast<MyOpenGLWnd*>(glfwGetWindowUserPointer(window));
			if (pThisClass) {
				pThisClass->scrollCallback(window, xoffset, yoffset);
			}
		});

		glfwSetKeyCallback(m_pGlfwWnd, [](GLFWwindow* window, int key, int scancode, int action, int mods) {
			auto pThisClass = static_cast<MyOpenGLWnd*>(glfwGetWindowUserPointer(window));
			if (pThisClass) {
				pThisClass->keyCallback(window, key, scancode, action, mods);
			}
		});
		glfwSetWindowUserPointer(m_pGlfwWnd, this);

		// 3. Render loop
		while (!glfwWindowShouldClose(m_pGlfwWnd))
		{
			// If the window is not visible and rendering is not being forced for a capture, skip this iteration.
			if (!glfwGetWindowAttrib(m_pGlfwWnd, GLFW_VISIBLE) && !m_bForceRendering)
			{
				Sleep(10);
				continue;
			}

			// 4. Check whether the mesh needs to be updated
			checkVerticeUpdate();

			// 5. Check window size
			checkFrameSize();

			// 6. Check mouse position
			checkMousePos();

			// 7. Check mouse wheel              
			checkScrollOffset();

			// 8. Check for key presses
			checkKeyDown();

			// 9. Update the timing
			float currentFrameTime = static_cast<float>(glfwGetTime());
			m_deltaTime = currentFrameTime - m_lastFrameTime;
			m_lastFrameTime = currentFrameTime;

			bool bCapture = m_bStartCapture;
			if (bCapture)
			{
				glBindFramebuffer(GL_FRAMEBUFFER, m_frameBufferObj._name);
			}
			else
			{
				glBindFramebuffer(GL_FRAMEBUFFER, 0);
			}

			// 10. Refresh color buffer and depth
			glClearColor(m_clearColor.r, m_clearColor.g, m_clearColor.b, m_clearColor.a);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

			// 11. Use shader
			m_pShader->use();

#pragma region 12. Set vertices shader
			// 12.1 pass projection matrix to shader (note that in this case it could change every frame)
			const auto& frameSize = m_frameSize.load();
			glm::mat4 projection{ 1.0f };
			if (frameSize.y > 0)
			{
				ReadLock(m_cameraMutex);
				projection = glm::perspective(glm::radians(m_pCamera->Zoom), (float)frameSize.x / (float)frameSize.y, 0.1f, 100.0f);
			}
			m_pShader->setMat4("projection", projection);

			// 12.2 camera/view transformation
			{
				ReadLock(m_cameraMutex);
				glm::mat4 view = m_pCamera->GetViewMatrix();
				m_pShader->setMat4("view", view);
			}

			// 12.3 model transformation
			glm::mat4 model = glm::mat4(1.0f);
			if (m_bAutorotate) {
				updateRotationAngles(m_deltaTime);
			}
			model = glm::rotate(model, glm::radians(m_modelRotAngle.x), glm::vec3(1.0f, 0.0f, 0.0f));
			model = glm::rotate(model, glm::radians(m_modelRotAngle.y), glm::vec3(0.0f, 1.0f, 0.0f));
			m_pShader->setMat4("model", model);
#pragma endregion

#pragma region 13. Set fragment shader
			{
				ReadLock(m_cameraMutex);
				m_pShader->setVec3("viewPos", m_pCamera->Position);
			}

			// light properties
			glm::vec3 lightColor{ 1.0, 1.0, 1.0 };

			glm::vec3 diffuseColor = lightColor * glm::vec3(0.5f); // decrease the influence
			glm::vec3 ambientColor = diffuseColor * glm::vec3(0.2f); // low influence
			m_pShader->setVec3("light.ambient", ambientColor);
			m_pShader->setVec3("light.diffuse", diffuseColor);
			m_pShader->setVec3("light.specular", 1.0f, 1.0f, 1.0f);

			m_pShader->setVec3("light.direction", glm::normalize(glm::vec3(-0.5f, -0.5f, -0.5f)));
#pragma endregion

			// 14. Draw elements
			for (const Mesh& mesh : m_vMeshs)
			{
				m_pShader->setVec3("material.ambient", mesh.ambient);
				m_pShader->setVec3("material.diffuse", mesh.diffuse);
				m_pShader->setVec3("material.specular", mesh.specular); // specular lighting doesn't have full effect on this object's material
				m_pShader->setFloat("material.shininess", mesh.shininess);

				glBindVertexArray(mesh.VAO); // bind this mesh's VAO before issuing its draw call
				glDrawElements(GL_TRIANGLES, mesh.indices.size(), GL_UNSIGNED_INT, 0);
				glBindVertexArray(0);
			}

			for (const auto& mesh : m_vPlaneMeshs)
			{
				m_pShader->setVec3("material.ambient", mesh.ambient);
				m_pShader->setVec3("material.diffuse", mesh.diffuse);
				m_pShader->setVec3("material.specular", mesh.specular); // specular lighting doesn't have full effect on this object's material
				m_pShader->setFloat("material.shininess", mesh.shininess);

				glBindVertexArray(mesh.VAO); // bind this mesh's VAO before issuing its draw call
				glDrawElements(GL_TRIANGLES, mesh.indices.size(), GL_UNSIGNED_INT, 0);
				glBindVertexArray(0);
			}

			if (bCapture && m_bForceRendering)
			{
				auto frameSize = m_frameSize.load(std::memory_order_relaxed);
				size_t w = frameSize.x;
				size_t h = frameSize.y;
				size_t c = m_capImageChannel;
				size_t pixelSize = w * h * c; 
				
				m_vCaptureDatas.clear();
				m_vCaptureDatas.resize(pixelSize);
				glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, m_vCaptureDatas.data());

				glBindFramebuffer(GL_FRAMEBUFFER, 0);

				m_bStartCapture = false;
				m_bForceRendering = false;
			}

			// 15. Update buffer
			glfwSwapBuffers(m_pGlfwWnd);
			glfwPollEvents();
		}

		// 16. release data
		for (auto& mesh : m_vMeshs)
		{
			glDeleteVertexArrays(1, &mesh.VAO);
			glDeleteBuffers(1, &mesh.VBO_1);
			glDeleteBuffers(1, &mesh.VBO_2);
			glDeleteBuffers(1, &mesh.EBO);
		}

		for (auto& mesh : m_vPlaneMeshs)
		{
			glDeleteVertexArrays(1, &mesh.VAO);
			glDeleteBuffers(1, &mesh.VBO_1);
			glDeleteBuffers(1, &mesh.VBO_2);
			glDeleteBuffers(1, &mesh.EBO);
		}

		m_frameBufferObj.Clear();

		glfwDestroyWindow(m_pGlfwWnd);
	});
}

(3) Handling Window Size Changes

/*
	The window is resized on the main thread while rendering runs on the child thread,
	so the child thread has to poll for size changes itself.
*/
void MyOpenGLWnd::checkFrameSize()
{
	auto frameSize = m_frameSize.load(std::memory_order_relaxed);
	if (frameSize.bUpdate)
	{
		glViewport(0, 0, frameSize.x, frameSize.y);
		m_frameSize.store(FrameSize(false, frameSize.x, frameSize.y), std::memory_order_relaxed);

		createFBO();
	}
}
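
m_frameSize is assumed here to be a std::atomic holding a small, trivially copyable struct (requires <atomic>); a definition consistent with how it is used above might look like this:

// Hypothetical definition matching the m_frameSize usage in this article.
// The struct must stay trivially copyable for std::atomic<FrameSize> to compile;
// for a type this size the atomic is typically implemented with an internal lock.
struct FrameSize
{
	bool bUpdate;	// true when the viewport and FBO need to be recreated
	int  x;			// framebuffer width in pixels
	int  y;			// framebuffer height in pixels

	FrameSize() : bUpdate(false), x(0), y(0) {}
	FrameSize(bool update, int w, int h) : bUpdate(update), x(w), y(h) {}
};

std::atomic<FrameSize> m_frameSize;	// written by the main thread, read by the render thread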

(4) Creating the FBO

/*
* @brief	Create FBO for off-screen rendering
* @note
*
* @author 	Canliang Wu
* @day		2024/09/26
*/
void MyOpenGLWnd::createFBO()
{
	auto frameSize = m_frameSize.load(std::memory_order_relaxed);
	int width = frameSize.x;
	int height = frameSize.y;

	// Generate fbo
	if (m_frameBufferObj._name == 0)
	{
		glGenFramebuffers(1, &m_frameBufferObj._name);
	}
	glBindFramebuffer(GL_FRAMEBUFFER, m_frameBufferObj._name);

	// Create textures for fbo
	if (!m_frameBufferObj._vTextures.empty())
	{
		glDeleteTextures(m_frameBufferObj._vTextures.size(), m_frameBufferObj._vTextures.data());
		m_frameBufferObj._vTextures.clear();
	}
	m_frameBufferObj._vTextures.resize(2);
	glGenTextures(m_frameBufferObj._vTextures.size(), m_frameBufferObj._vTextures.data());

	// Set color texture attributes
	glBindTexture(GL_TEXTURE_2D, m_frameBufferObj._vTextures[0]);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

	// Add color texture
	glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_frameBufferObj._vTextures[0], 0);

	// Set depth texture attributes (without a depth attachment the screenshot comes out fragmented)
	glBindTexture(GL_TEXTURE_2D, m_frameBufferObj._vTextures[1]);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, width, height, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

	// Add depth texture
	glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_frameBufferObj._vTextures[1], 0);

	// Check that the FBO is complete
	if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
		std::cerr << "Framebuffer is not complete!" << std::endl;
	}

	glBindTexture(GL_TEXTURE_2D, 0);		// unbind Texture
	glBindFramebuffer(GL_FRAMEBUFFER, 0);	// unbind Framebuffer
}
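
Since the depth values are never sampled, a renderbuffer would work just as well as a depth texture here. A sketch of the alternative, which could replace the depth-texture block inside createFBO():

// Alternative: attach a renderbuffer as the depth buffer instead of a texture.
GLuint rbo = 0;
glGenRenderbuffers(1, &rbo);
glBindRenderbuffer(GL_RENDERBUFFER, rbo);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo);
glBindRenderbuffer(GL_RENDERBUFFER, 0);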

(5) The Screenshot Function Called from the Main Thread

/*
* @brief	Capture the rendered frame, whether the GLFW window is shown or hidden
* @param	vImageDatas - receives the captured image data
* @param	bResize - when the GLFW window is hidden, the viewport size must be set explicitly
* @param	w - viewport width
* @param	h - viewport height
* @note
*
* @author 	Canliang Wu
* @day		2024/09/26
*/
void MyOpenGLWnd::CaptureScreen(std::vector<unsigned char>& vImageDatas, bool bResize, int w, int h)
{
	// 1. Resize the viewport when the GLFW window is hidden
	if (bResize)
	{
		m_frameSize.store(FrameSize(true, w, h), std::memory_order_relaxed);
	}

	// 2. Request a capture and block until the render thread has finished it
	m_bStartCapture = true;
	m_bForceRendering = true;
	while (m_bStartCapture)
	{
		Sleep(10);
	}

	// 3. Flip the image vertically (OpenGL's origin is the bottom-left corner, a bitmap's is the top-left)
	auto frameSize = m_frameSize.load(std::memory_order_relaxed);

	vImageDatas.resize(m_vCaptureDatas.size());
	for (int y = 0; y < frameSize.y; ++y) {
		for (int x = 0; x < frameSize.x; ++x) {
			// Index of the pixel in the original (bottom-left origin) image
			int originalIndex = (frameSize.y - 1 - y) * frameSize.x * m_capImageChannel + x * m_capImageChannel;
			// Index of the pixel in the flipped (top-left origin) image
			int flippedIndex = y * frameSize.x * m_capImageChannel + x * m_capImageChannel;

			// Copy the RGBA values
			vImageDatas[flippedIndex] = m_vCaptureDatas[originalIndex];         // R
			vImageDatas[flippedIndex + 1] = m_vCaptureDatas[originalIndex + 1]; // G
			vImageDatas[flippedIndex + 2] = m_vCaptureDatas[originalIndex + 2]; // B
			vImageDatas[flippedIndex + 3] = m_vCaptureDatas[originalIndex + 3]; // A
		}
	}
}
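
A typical call site on the main (UI) thread might look like the sketch below. Saving the pixels as a PNG here uses stb_image_write, which is not one of the libraries listed above and is included purely for illustration; the window pointer, size, and file name are all hypothetical.

// Hypothetical caller: capture an 800x600 frame while the GLFW window is hidden
// and save it with stb_image_write (an extra, optional dependency).
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h"
#include <vector>

void SaveScreenshot(MyOpenGLWnd* pWnd)
{
	std::vector<unsigned char> pixels;
	pWnd->CaptureScreen(pixels, true, 800, 600);	// blocks until the render thread finishes the capture
	stbi_write_png("capture.png", 800, 600, 4, pixels.data(), 800 * 4);	// 4 channels (RGBA), stride = width * 4
}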

3. Caveat

A depth attachment is mandatory here; without it the result looks like this:
[Screenshot: fragmented rendering produced when the FBO has no depth attachment]
