集成过Agora或者Zego的开发者都知道,这两者都支持自绘制或者让SDK绘制,SDK绘制有缺陷:
1.改变窗口大小时,由于未能及时通知Agora,绘制的视频会闪烁
2.sdk同时只能在一个窗口绘制
拿到Agora采集的数据自己绘制,可以解决上面的缺陷。
声网支持两种方式,一种是继承接口方式,一种是设置一个回调接口。两种方式都一样。接口如下: (zego跟agora类似),如果接口返回的数据格式不是yuv,可以使用libyuv库(跨平台库)进行转化。
bool onCaptureVideoFrame(VideoFrame& videoFrame) override;
bool onRenderVideoFrame(unsigned int uid, VideoFrame& videoFrame) override;
VideoFrame中包含了视频数据,格式是yuv的。
这两个接口抛出来的数据在声网的线程,Qt不支持在非主线程中绘制窗体,所以需要将数据通过信号和槽抛到主线程,然后进行绘制,Agora的数据声网会自动销毁,所以这里要进行保存,使用QByteArray来进行数据的保存.
void copyFrameData(VideoFrame& videoFrame, QSharedPointer<QByteArray>& arrayStr)
{
char* pNewYuvBuf = (char*)videoFrame.yBuffer;
char* pNewUBuf = (char*)videoFrame.uBuffer;
char* pNewVBuf = (char*)videoFrame.vBuffer;
//分行拷贝
if (videoFrame.yStride > videoFrame.width)
{
pNewYuvBuf = new char [videoFrame.width * videoFrame.height * 3 / 2];
pNewUBuf = pNewYuvBuf + videoFrame.width * videoFrame.height;
pNewVBuf = pNewUBuf + videoFrame.width * videoFrame.height / 4;
char* yBuf1 = (char*)pNewYuvBuf;
char* yBuf2 = (char*)videoFrame.yBuffer;
for (int h = 0; h < videoFrame.height; ++h)
{
if (h > 0)
{
memcpy(yBuf1, yBuf2, videoFrame.width);
}
yBuf1 += videoFrame.width;
yBuf2 += videoFrame.yStride;
}
char* uBuf1 = (char*)pNewUBuf;
char* uBuf2 = (char*)videoFrame.uBuffer;
for (int h = 0; h < videoFrame.height / 2; ++h)
{
if (h > 0)
{
memcpy(uBuf1, uBuf2, videoFrame.width / 2);
}
uBuf1 += videoFrame.width / 2;
uBuf2 += videoFrame.uStride;
}
char* vBuf1 = (char*)pNewVBuf;
char* vBuf2 = (char*)videoFrame.vBuffer;
for (int h = 0; h < videoFrame.height / 2; ++h)
{
if (h > 0)
{
memcpy(vBuf1, vBuf2, videoFrame.width / 2);
}
vBuf1 += videoFrame.width / 2;
vBuf2 += videoFrame.vStride;
}
}
arrayStr->append(pNewYuvBuf, videoFrame.width * videoFrame.height);
arrayStr->append(pNewUBuf, videoFrame.width * videoFrame.height / 4);
arrayStr->append(pNewVBuf, videoFrame.width * videoFrame.height / 4);
if (videoFrame.yStride > videoFrame.width)
{
delete[] pNewYuvBuf;
}
}
然后将保存好的 QSharedPointer&lt;QByteArray&gt; 数据通过信号发送到主线程进行绘制。
代码如下:
头文件:
#ifndef VIDEORENDEROPENGL_H
#define VIDEORENDEROPENGL_H
#include <QtOpenGL>
#include <QOpenGLFunctions>
// OpenGL widget that renders raw I420 (YUV420P) video frames pushed from an
// RTC SDK (Agora/Zego) capture or render callback.  Frames must be handed
// over on the GUI thread via setFrameData() (typically through a queued
// signal/slot); YUV -> RGB conversion happens in the fragment shader.
class VideoRenderOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
Q_OBJECT
public:
explicit VideoRenderOpenGLWidget(QWidget *parent = 0);
~VideoRenderOpenGLWidget();
// Set the video size (the width/height reported in the SDK's frame struct).
void setVideoParameters(int w, int h);
// Hand over one packed I420 frame: Y plane, then U, then V, no row padding.
void setFrameData(const QSharedPointer<QByteArray>& data);
// Enable/disable horizontal mirroring of the rendered image.
void setMirror(bool bMirror);
protected:
void bind();
void bindPlane(int p);
void initializeShader();
void initTextures();
virtual void initializeGL();
virtual void paintGL();
virtual void resizeGL(int w, int h);
void clear();
private:
bool update_res; // video size changed: rebuild shader program and textures
bool upload_tex; // m_data holds a frame not yet uploaded to the GL textures
int width; // video width in pixels
int height; // video height in pixels
QByteArray m_data; // deep copy of the current frame (owned by this widget)
// Description of one picture plane (Y, U or V) and its GL upload parameters.
typedef struct
{
char* data; // points into m_data; not owned
int stride; // bytes per row
GLint internal_fmt;
GLenum fmt;
GLenum type;
int bpp; // bytes per pixel
QSize tex_size;
QSize upload_size;
} Plane;
QVector<Plane> plane; // three planes for I420
GLuint tex [3]; // one GL texture object per plane
int u_MVP_matrix, u_colorMatrix, u_Texture [3]; // shader uniform locations
QOpenGLShaderProgram *m_program;
QMutex m_mutex; // guards m_data/plane/width/height across threads
QMatrix4x4 m_mat; // model-view-projection matrix (identity)
bool m_bMirror{false}; // horizontal mirror flag
};
#endif // VIDEORENDEROPENGL_H
源文件:
#include "VideoRenderOpenGLWidget.h"
// BT.601 YUV -> RGB conversion matrix, pre-multiplied with an offset matrix
// that shifts U/V from [0,1] texture range to [-0.5,0.5].  Uploaded to the
// fragment shader as u_colorMatrix.
static const QMatrix4x4 yuv2rgb_bt601 =
QMatrix4x4(
1.0f, 0.000f, 1.402f, 0.0f,
1.0f, -0.344f, -0.714f, 0.0f,
1.0f, 1.772f, 0.000f, 0.0f,
0.0f, 0.000f, 0.000f, 1.0f)
*
QMatrix4x4(
1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, -0.5f,
0.0f, 0.0f, 1.0f, -0.5f,
0.0f, 0.0f, 0.0f, 1.0f);
// Full-screen quad, drawn as a 4-vertex triangle strip.
const GLfloat kVertices[] = {
-1, 1,
-1, -1,
1, 1,
1, -1,
};
// Identity (unflipped) texture coordinates — selected when m_bMirror is true.
// NOTE(review): the names look swapped relative to their use in paintGL();
// presumably the camera frame arrives pre-mirrored — confirm against the SDK.
const GLfloat kTexCoordsMirror[] = {
0, 0,
0, 1,
1, 0,
1, 1,
};
// Horizontally flipped coordinates (s -> 1 - s) — the default mapping.
const GLfloat kTexCoords[] = {
1, 0,
1, 1,
0, 0,
0, 1,
};
// Names of the vertex-shader attributes, bound to locations 0 (a_Pos) and
// 1 (a_Tex).  The last entry begins with '\0' and acts as the terminator
// for the `attr[i][0]` loops in the renderer.
char const *const* BukaAttributes()
{
    static const char kPosName[] = "a_Pos";
    static const char kTexName[] = "a_Tex";
    static const char kSentinel[] = "\0QtAV"; // [0] == '\0' ends iteration
    static const char* const kAttrs[] = { kPosName, kTexName, kSentinel };
    return kAttrs;
}
// Maps a QImage format to matching GL upload parameters.
// NOTE(review): this entry type is not referenced in the visible code —
// apparently kept for future RGB-format support.
typedef struct
{
QImage::Format qfmt;
GLint internal_fmt;
GLenum fmt;
GLenum type;
int bpp;
} gl_fmt_entry_t;
// Stringify helper so the shaders below can be written as plain code.
#define glsl(x) #x
// Vertex shader: transforms the quad by u_MVP_matrix and forwards texcoords.
static const char kVertexShader[] = glsl(
attribute vec4 a_Pos;
attribute vec2 a_Tex;
uniform mat4 u_MVP_matrix;
varying vec2 v_TexCoords;
void main()
{
gl_Position = u_MVP_matrix * a_Pos;
v_TexCoords = a_Tex;
}
);
// Fragment shader for planar YUV: samples one texture per plane (r channel
// of each single-channel texture) and converts to RGB via u_colorMatrix.
static const char kFragmentShader[] = glsl(
uniform sampler2D u_Texture0;
uniform sampler2D u_Texture1;
uniform sampler2D u_Texture2;
varying mediump vec2 v_TexCoords;
uniform mat4 u_colorMatrix;
void main()
{
gl_FragColor = clamp(u_colorMatrix
* vec4(
texture2D(u_Texture0, v_TexCoords).r,
texture2D(u_Texture1, v_TexCoords).r,
texture2D(u_Texture2, v_TexCoords).r,
1)
, 0.0, 1.0);
}
);
// Fragment shader for single-plane RGB input (used when plane.size() == 1).
static const char kFragmentShaderRGB[] = glsl(
uniform sampler2D u_Texture0;
varying mediump vec2 v_TexCoords;
void main()
{
vec4 c = texture2D(u_Texture0, v_TexCoords);
gl_FragColor = c.rgba;
}
);
#undef glsl
// Construct the render widget with a 1x1 placeholder video size.
// Requests a minimal surface: no depth/stencil (2D video only), 4x MSAA.
VideoRenderOpenGLWidget::VideoRenderOpenGLWidget(QWidget *parent)
    : QOpenGLWidget(parent)
    , update_res(true)
    , upload_tex(true)
    , width(1)
    , height(1) // initializer list now matches member declaration order
    , m_program(nullptr)
{
    QSurfaceFormat format;
    format.setDepthBufferSize(0);
    format.setStencilBufferSize(0);
    format.setSamples(4);
    setFormat(format);
    // BUG FIX: memset(tex, 0, 3) cleared only 3 *bytes* of the GLuint[3]
    // array, leaving most of the texture ids uninitialized (so the
    // `!tex[0]` check in paintGL() could see garbage).  Zero it all.
    memset(tex, 0, sizeof(tex));
}
// Release GL resources; the context must be current while doing so.
VideoRenderOpenGLWidget::~VideoRenderOpenGLWidget()
{
    makeCurrent();
    clear();
    if (m_program != nullptr)
    {
        m_program->release();
        delete m_program;
        m_program = nullptr;
    }
    doneCurrent();
}
// Accept one packed I420 frame (Y, then U, then V, no row padding) and
// schedule a repaint.  Must be called on the GUI thread (e.g. via a queued
// signal from the SDK callback thread).
void VideoRenderOpenGLWidget::setFrameData(const QSharedPointer<QByteArray>& data)
{
    // FIX: guard against a null shared pointer before dereferencing it.
    if (data.isNull())
    {
        return;
    }
    QMutexLocker lock(&m_mutex);
    Q_UNUSED(lock);
    upload_tex = true;
    // Deep copy: the sender may release or reuse its buffer at any time.
    m_data = *data.data();
    if (m_data.isEmpty())
    {
        return;
    }
    char* pData = m_data.data();
    if (pData == nullptr)
    {
        return;
    }
    // setVideoParameters() must have been called first to size the planes.
    if (plane.isEmpty())
    {
        return;
    }
    plane [0].data = pData;
    if (plane.size() > 2)
    {
        // I420 layout: U follows the full-resolution Y plane, V follows the
        // quarter-size U plane.
        plane [1].data = plane [0].data + plane [0].stride * height;
        plane [2].data = plane [1].data + plane [1].stride * height / 2;
    }
    // Trigger paintGL() on the GUI thread.
    update();
}
// Toggle horizontal mirroring; repaint immediately if a frame is present.
void VideoRenderOpenGLWidget::setMirror(bool bMirror)
{
    m_bMirror = bMirror;
    if (m_data.isEmpty())
    {
        return; // nothing to redraw yet
    }
    update();
}
// Bind (and upload, if pending) every plane texture.
// The (i + 1) % size order visits planes 1, 2, 0 — so texture unit 0 is the
// last one activated, leaving GL_TEXTURE0 active when drawing starts.
void VideoRenderOpenGLWidget::bind()
{
for (int i = 0; i < plane.size(); ++i)
{
bindPlane((i + 1) % plane.size());
}
// Frame uploaded; skip re-uploading until setFrameData() delivers new data.
upload_tex = false;
}
// Activate texture unit |p|, bind its texture, and — when a new frame is
// pending (upload_tex) — upload that plane's pixels into the existing
// texture storage with glTexSubImage2D.
// NOTE(review): rows are assumed tightly packed; widths not a multiple of 4
// depend on GL_UNPACK_ALIGNMENT — confirm if odd-width video is possible.
void VideoRenderOpenGLWidget::bindPlane(int p)
{
glActiveTexture(GL_TEXTURE0 + p);
glBindTexture(GL_TEXTURE_2D, tex [p]);
if (!upload_tex)
{
return;
}
// This is necessary for non-power-of-two textures
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const Plane &P = plane [p];
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, P.upload_size.width(), P.upload_size.height(), P.fmt, P.type, P.data);
}
// Destroy and recreate one GL texture per plane (Y/U/V), sized for the
// current video resolution.  Called when the size changes (update_res).
void VideoRenderOpenGLWidget::initTextures()
{
    glDeleteTextures(3, tex); // deleting id 0 is a no-op, so always pass 3
    // BUG FIX: memset(tex, 0, 3) zeroed only 3 bytes of the GLuint[3] array;
    // zero the whole array so unused slots are a valid 0 ("no texture").
    memset(tex, 0, sizeof(tex));
    glGenTextures(plane.size(), tex);
    for (int i = 0; i < plane.size(); ++i)
    {
        const Plane &P = plane [i];
        glBindTexture(GL_TEXTURE_2D, tex [i]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        // This is necessary for non-power-of-two textures
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        // Allocate storage only; pixels are uploaded later via bindPlane().
        glTexImage2D(GL_TEXTURE_2D, 0, P.internal_fmt, P.tex_size.width(), P.tex_size.height(), 0 /*border, ES not support*/, P.fmt, P.type, NULL);
        glBindTexture(GL_TEXTURE_2D, 0);
    }
}
void VideoRenderOpenGLWidget::setVideoParameters(int w, int h)
{
if (width == w && height == h)
{
return;
}
QMutexLocker lock(&m_mutex);
Q_UNUSED(lock);
update_res = true;
m_data.clear();
width = w;
height = h;
plane.resize(3);
Plane &p = plane [0];
p.data = 0;
p.stride = w;
p.tex_size.setWidth(p.stride);
p.upload_size.setWidth(p.stride);
p.tex_size.setHeight(h);
p.upload_size.setHeight(h);
p.internal_fmt = p.fmt = GL_LUMINANCE;
p.type = GL_UNSIGNED_BYTE;
p.bpp = 1;
for (int i = 1; i < plane.size(); ++i)
{
Plane &p = plane [i];
p.stride = w / 2;
p.tex_size.setWidth(p.stride);
p.upload_size.setWidth(p.stride);
p.tex_size.setHeight(h / 2);
p.upload_size.setHeight(h / 2);
p.internal_fmt = p.fmt = GL_LUMINANCE;
p.type = GL_UNSIGNED_BYTE;
p.bpp = 1;
}
}
// Draw the current frame: upload pending plane data, bind the YUV->RGB
// shader, and render a full-widget textured quad.
void VideoRenderOpenGLWidget::paintGL()
{
// Clear to white first so an empty/hidden widget shows a blank background.
clear();
if (!this->isVisible()){
return QOpenGLWidget::paintGL();
}
// Drain any stale GL errors left by earlier code so later checks are clean.
for (GLenum err; (err = glGetError()) != GL_NO_ERROR;)
{
}
QMutexLocker lock(&m_mutex);
Q_UNUSED(lock);
if (m_data.isEmpty()) {
return QOpenGLWidget::paintGL();
}
// Plane pointers are set by setFrameData(); bail out if no frame arrived yet.
if (!plane.isEmpty() && !plane [0].data)
{
return;
}
// Rebuild shader program and textures after a resolution change (or first use).
if (update_res || !tex [0])
{
initializeShader();
initTextures();
update_res = false;
}
bind();
if(!m_program)
{
return;
}
if (!m_program->bind()){
return;
}
// Debugging hook: the shader log is fetched but intentionally not acted on.
if(!m_program->log().isEmpty()){
}
// Point each sampler uniform at its texture unit (0..n-1).
for (int i = 0; i < plane.size(); ++i)
{
m_program->setUniformValue(u_Texture [i], (GLint)i);
}
m_program->setUniformValue(u_colorMatrix, yuv2rgb_bt601);
m_program->setUniformValue(u_MVP_matrix, m_mat);
// Attribute 0 = quad positions, attribute 1 = texture coordinates
// (mirrored or default set, chosen by m_bMirror).
m_program->setAttributeArray(0, GL_FLOAT, kVertices, 2);
if (m_bMirror){
m_program->setAttributeArray(1, GL_FLOAT, kTexCoordsMirror, 2);
}
else {
m_program->setAttributeArray(1, GL_FLOAT, kTexCoords, 2);
}
// The sentinel entry in BukaAttributes() (first char '\0') ends this loop.
char const *const *attr = BukaAttributes();
for (int i = 0; attr [i] [0]; ++i)
{
//TODO: in setActiveShader
m_program->enableAttributeArray(i);
}
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
for (int i = 0; attr [i] [0]; ++i)
{
//TODO: in setActiveShader
if (m_program != NULL){
m_program->disableAttributeArray(i);
}
}
m_program->release();
}
// One-time GL setup: resolve function pointers and (once per process) probe
// the driver for the extensions this renderer relies on.
// NOTE(review): the probe results (version string, strExt, the GetIntegerv
// values) are computed but never stored or logged — presumably leftover
// debug scaffolding; confirm before removing.
void VideoRenderOpenGLWidget::initializeGL()
{
// Extensions this renderer depends on, with the core GL version that
// subsumes each one (major.minor; 0.0 = extension-only).
static const struct {
const char *extension;
int major;
int minor;
} required_extensions[] = {
{ "GL_ARB_multitexture", 1, 3 },
{ "GL_ARB_vertex_buffer_object", 1, 5 }, //GLX_ARB_vertex_buffer_object
{ "GL_ARB_vertex_shader", 2, 0 },
{ "GL_ARB_fragment_shader", 2, 0 },
{ "GL_ARB_shader_objects", 2, 0 },
{ "GL_ARB_texture_non_power_of_two", 0, 0 },
{ "GL_EXT_unpack_subimage", 0, 0 },
{ NULL, 0, 0 }
};
// Probe only once per process, not per widget/context.
static bool bChecked = false;
initializeOpenGLFunctions();
if (!bChecked)
{
bChecked = true;
const char * version = (const char*)glGetString(GL_VERSION);
if (version != NULL) {
}
const char * extensions = (const char*)glGetString(GL_EXTENSIONS);
if (extensions != NULL) {
QString str = QString::fromUtf8(extensions);
for (int i = 0; required_extensions[i].extension; i++)
{
QString strExt = QString::fromUtf8(required_extensions[i].extension);
if (str.contains(strExt)) {
strExt += " support";
}
else {
strExt += " unsupport";
}
}
}
}
int max_texture_size, max_viewport_width;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
glGetIntegerv(GL_MAX_VIEWPORT_DIMS, &max_viewport_width);
}
// Keep the viewport matched to the widget size.
void VideoRenderOpenGLWidget::resizeGL(int w, int h)
{
    // Clamp zero dimensions to 1 to avoid a degenerate viewport (and any
    // later divide-by-zero in aspect-ratio math).
    if (w == 0)
    {
        w = 1;
    }
    if (h == 0)
    {
        h = 1;
    }
    glViewport(0, 0, w, qMax(0, h));
    // The quad fills the widget; no projection needed, keep MVP = identity.
    m_mat.setToIdentity();
}
// (Re)build the shader program: vertex shader + either the multi-plane YUV
// fragment shader or the single-plane RGB one, then resolve attribute and
// uniform locations.  Errors are silently ignored (empty if-bodies below
// are debugging hooks).
void VideoRenderOpenGLWidget::initializeShader()
{
// Discard any previous program (e.g. after a resolution change).
if (m_program)
{
m_program->release();
delete m_program;
m_program = 0;
}
m_program = new QOpenGLShaderProgram(this);
if (!m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, kVertexShader)){
}
QByteArray frag;
if (plane.size() > 1)
{
frag = QByteArray(kFragmentShader);
}
else
{
frag = QByteArray(kFragmentShaderRGB);
}
// On GL ES declare default precision; on desktop GL neutralize the
// precision qualifiers so the same source compiles on both.
frag.prepend("#ifdef GL_ES\n"
"precision mediump int;\n"
"precision mediump float;\n"
"#else\n"
"#define highp\n"
"#define mediump\n"
"#define lowp\n"
"#endif\n");
if (m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, frag)){
}
// Bind a_Pos/a_Tex to fixed locations 0/1 before linking (the sentinel
// entry in BukaAttributes(), first char '\0', terminates the loop).
char const *const *attr = BukaAttributes();
for (int i = 0; attr [i] [0]; ++i)
{
m_program->bindAttributeLocation(attr [i], i);
}
if (!m_program->link())
{
}
u_MVP_matrix = m_program->uniformLocation("u_MVP_matrix");
// fragment shader
u_colorMatrix = m_program->uniformLocation("u_colorMatrix");
// Cache one sampler uniform location per plane (u_Texture0..u_TextureN).
for (int i = 0; i < plane.size(); ++i)
{
QString tex_var = QString("u_Texture%1").arg(QString::number(i));
u_Texture [i] = m_program->uniformLocation(tex_var);
}
}
// Fill the framebuffer with opaque white before drawing the frame.
void VideoRenderOpenGLWidget::clear()
{
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
实现绘制代码在上面,可作为参考,另外这代码可以在windows和mac绘制。
注意:opengl绘制可能会影响到qt内部的opengl状态,偶现的,目前未找到问题。如有问题可以参照我前面的博客方式绘制视频。更加简单并且也是跨平台的
如有问题可以加:2043649236这个qq