Qt + FFmpeg Audio/Video Player (2)
Continued from the previous post.
This post covers combining the QOpenGLWidget control with FFmpeg decoding.
1. Initializing the QOpenGLWidget control
- Override these three QOpenGLWidget functions (a sketch of the matching class declaration follows the list):
void paintGL();
void initializeGL();
void resizeGL(int width, int height);
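A minimal class declaration backing the snippets in this post might look like the following sketch. The member names (program, vbo, m_py, and so on) are taken from the code below; everything else, including the constructor signature, is an assumption:
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <QOpenGLBuffer>

//Sketch: widget declaration assumed from the snippets in this post.
class VideoWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    explicit VideoWidget(QWidget *parent = nullptr);
signals:
    void emitComputerSupportOpenGL(bool supported);
protected:
    void initializeGL() override;
    void paintGL() override;
    void resizeGL(int width, int height) override;
private:
    enum { ATTRIB_VERTEX = 0, ATTRIB_TEXTURE = 1 };
    bool hwDecodeFlag = false;                  //true: NV12 hardware path
    QOpenGLShaderProgram program;               //NV12 pipeline
    QOpenGLShaderProgram m_program;             //YUV420P pipeline
    QOpenGLBuffer vbo;
    GLuint idY = 0, idUV = 0;                   //NV12 textures
    GLuint m_idy = 0, m_idu = 0, m_idv = 0;     //YUV420P textures
    int m_textureUniformY = -1, m_textureUniformU = -1, m_textureUniformV = -1;
    uchar *m_py = nullptr, *m_pu = nullptr, *m_pv = nullptr, *m_puv = nullptr;
    int m_width = 0, m_height = 0, m_type = 0;  //m_type: 1 = NV12, 2 = YUV420P
    //audio members (audioOutput, audioDevice, ...) omitted here
};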
- The initializeGL() initialization function
/*
 * function: initialize OpenGL; Qt calls this once before the first paintGL()
 */
void VideoWidget::initializeGL()
{
if(this->hwDecodeFlag)
{
initialize_Nv12(); //NV12 path: hardware decode on Windows
}
else
{
initialize_yuv420p(); //YUV420P path: software decode
}
}
/*
 * function: set up the OpenGL pipeline for NV12 frames
 */
void VideoWidget::initialize_Nv12()
{
initializeOpenGLFunctions();
const char *vsrc =
"attribute vec4 vertexIn; \
attribute vec4 textureIn; \
varying vec4 textureOut; \
void main(void) \
{ \
gl_Position = vertexIn; \
textureOut = textureIn; \
}";
const char *fsrc =
"varying mediump vec4 textureOut;\n"
"uniform sampler2D textureY;\n"
"uniform sampler2D textureUV;\n"
"void main(void)\n"
"{\n"
"vec3 yuv; \n"
"vec3 rgb; \n"
"yuv.x = texture2D(textureY, textureOut.st).r - 0.0625; \n"
"yuv.y = texture2D(textureUV, textureOut.st).r - 0.5; \n"
"yuv.z = texture2D(textureUV, textureOut.st).g - 0.5; \n"
"rgb = mat3( 1, 1, 1, \n"
"0, -0.39465, 2.03211, \n"
"1.13983, -0.58060, 0) * yuv; \n"
"gl_FragColor = vec4(rgb, 1); \n"
"}\n";
program.addShaderFromSourceCode(QOpenGLShader::Vertex,vsrc);
program.addShaderFromSourceCode(QOpenGLShader::Fragment,fsrc);
emit emitComputerSupportOpenGL(program.link());
GLfloat points[]{
-1.0f, 1.0f,
1.0f, 1.0f,
1.0f, -1.0f,
-1.0f, -1.0f,
0.0f,0.0f,
1.0f,0.0f,
1.0f,1.0f,
0.0f,1.0f
};
vbo.create();
vbo.bind();
vbo.allocate(points,sizeof(points));
GLuint ids[2];
glGenTextures(2,ids);
idY = ids[0];
idUV = ids[1];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
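Both addShaderFromSourceCode() and link() return a bool; while developing it is worth checking them and printing QOpenGLShaderProgram::log() on failure. A minimal sketch of what that could look like (not in the original code):
//Optional: surface shader build errors instead of failing silently.
if (!program.addShaderFromSourceCode(QOpenGLShader::Vertex, vsrc) ||
    !program.addShaderFromSourceCode(QOpenGLShader::Fragment, fsrc) ||
    !program.link())
{
    qDebug() << "shader build failed:" << program.log();
}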
/*
 * function: set up the OpenGL pipeline for YUV420P frames
 */
void VideoWidget::initialize_yuv420p()
{
//resolve the OpenGL functions (inherited from QOpenGLFunctions)
initializeOpenGLFunctions();
//vertex shader
const char *vString =
"attribute vec4 vertexPosition;\
attribute vec2 textureCoordinate;\
varying vec2 texture_Out;\
void main(void)\
{\
gl_Position = vertexPosition;\
texture_Out = textureCoordinate;\
}";
//fragment shader
const char *tString =
"varying vec2 texture_Out;\
uniform sampler2D tex_y;\
uniform sampler2D tex_u;\
uniform sampler2D tex_v;\
void main(void)\
{\
vec3 YUV;\
vec3 RGB;\
YUV.x = texture2D(tex_y, texture_Out).r;\
YUV.y = texture2D(tex_u, texture_Out).r - 0.5;\
YUV.z = texture2D(tex_v, texture_Out).r - 0.5;\
RGB = mat3(1.0, 1.0, 1.0,\
0.0, -0.39465, 2.03211,\
1.13983, -0.58060, 0.0) * YUV;\
gl_FragColor = vec4(RGB, 1.0);\
}";
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST); //enable depth testing
glDepthFunc(GL_LEQUAL); //depth comparison function
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST); //request proper perspective correction
//load the shader sources into m_program
//fragment (per-pixel) shader
m_program.addShaderFromSourceCode(QOpenGLShader::Fragment, tString);
//vertex shader
m_program.addShaderFromSourceCode(QOpenGLShader::Vertex, vString);
//bind the vertex attribute location
m_program.bindAttributeLocation("vertexPosition",ATTRIB_VERTEX);
//bind the texture-coordinate attribute location
m_program.bindAttributeLocation("textureCoordinate",ATTRIB_TEXTURE);
//link the program and report whether this machine's OpenGL can run it
m_program.link();
emit emitComputerSupportOpenGL(m_program.bind());
//vertex and texture-coordinate data
//vertices (triangle-strip order)
static const GLfloat ver[] = {
-1.0f,-1.0f,
1.0f,-1.0f,
-1.0f, 1.0f,
1.0f,1.0f
};
//texture coordinates
static const GLfloat tex[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f
};
//set up the vertex/texture-coordinate arrays and enable them
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, ver);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, tex);
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//fetch the sampler uniform locations from the shader
m_textureUniformY = m_program.uniformLocation("tex_y");
m_textureUniformU = m_program.uniformLocation("tex_u");
m_textureUniformV = m_program.uniformLocation("tex_v");
//create the three plane textures
glGenTextures(1, &m_idy);
//Y plane
glBindTexture(GL_TEXTURE_2D, m_idy);
//magnification filter: linear interpolation (GL_NEAREST is cheaper but visibly blocky)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//U plane
glGenTextures(1, &m_idu);
glBindTexture(GL_TEXTURE_2D, m_idu);
//linear filtering
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//V plane
glGenTextures(1, &m_idv);
glBindTexture(GL_TEXTURE_2D, m_idv);
//linear filtering
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
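The emitComputerSupportOpenGL(bool) signal gives the owning window a chance to fall back (for example to QPainter drawing) when the shader program cannot be linked or bound. A possible consumer, assuming an owning widget that is not shown in this post:
//Hypothetical consumer of the support signal; the owning widget is not part of this post.
connect(videoWidget, &VideoWidget::emitComputerSupportOpenGL,
        this, [](bool supported) {
    if (!supported)
        qDebug() << "OpenGL shader pipeline unavailable; consider a software fallback";
});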
- The paintGL() render function
/*
 * function: render the current frame
 */
void VideoWidget::paintGL()
{
if(m_type == 1)
{
if(m_py == nullptr || m_puv == nullptr)
{
return;
}
render(m_py, m_puv, m_width, m_height); //NV12 path (hardware decode)
return;
}
else if(m_type == 2)
{
if(m_pu == nullptr || m_pv == nullptr || m_py == nullptr)
return;
render(m_py, m_pu, m_pv, m_width, m_height, m_type); //YUV420P path (software decode)
}
}
/*
 * function: render an NV12 frame
 * @param: py NV12 Y plane
 * @param: puv NV12 interleaved UV plane
 * @param: w frame width
 * @param: h frame height
 */
void VideoWidget::render(uchar* py,uchar* puv, int w, int h)
{
if(w == 0 || h == 0 || py == nullptr || puv == nullptr)
return;
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
program.bind();
vbo.bind();
program.enableAttributeArray("vertexIn");
program.enableAttributeArray("textureIn");
program.setAttributeBuffer("vertexIn",GL_FLOAT, 0, 2, 2*sizeof(GLfloat));
program.setAttributeBuffer("textureIn",GL_FLOAT,2 * 4 * sizeof(GLfloat),2,2*sizeof(GLfloat));
glActiveTexture(GL_TEXTURE0 + 1);
glBindTexture(GL_TEXTURE_2D,idY);
glTexImage2D(GL_TEXTURE_2D,0,GL_RED,w,h,0,GL_RED,GL_UNSIGNED_BYTE,py);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glActiveTexture(GL_TEXTURE0 + 0);
glBindTexture(GL_TEXTURE_2D,idUV);
glTexImage2D(GL_TEXTURE_2D,0,GL_RG,w >> 1,h >> 1,0,GL_RG,GL_UNSIGNED_BYTE,puv);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
program.setUniformValue("textureUV",0);
program.setUniformValue("textureY",1);
glDrawArrays(GL_QUADS,0,4);
program.disableAttributeArray("vertexIn");
program.disableAttributeArray("textureIn");
program.release();
}
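One caveat with these uploads: glTexImage2D() defaults to 4-byte row alignment (GL_UNPACK_ALIGNMENT), so a frame whose width is not a multiple of 4 will render sheared. A defensive addition before the uploads, in this render path and the YUV420P one below (not in the original code):
//Byte-tight rows: avoids shearing when the frame width is not a multiple of 4.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);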
/*
 * function: render a YUV420P frame
 * @param: py Y plane
 * @param: pu U plane
 * @param: pv V plane
 * @param: width frame width
 * @param: height frame height
 * @param: type always 2 for this path
 */
void VideoWidget::render(uchar* py,uchar* pu,uchar* pv,int width,int height,int type)
{
if(width == 0 || height == 0 || py == nullptr || pu == nullptr || pv == nullptr){
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
return;
}
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_idy);
//upload the Y plane (copies from CPU memory)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, py);
//point the sampler uniform at texture unit 0
glUniform1i(m_textureUniformY, 0);
glActiveTexture(GL_TEXTURE0+1);
glBindTexture(GL_TEXTURE_2D, m_idu);
//upload the U plane (quarter size)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width/2, height/2, 0, GL_RED, GL_UNSIGNED_BYTE, pu);
//point the sampler uniform at texture unit 1
glUniform1i(m_textureUniformU, 1);
glActiveTexture(GL_TEXTURE0+2);
glBindTexture(GL_TEXTURE_2D, m_idv);
//upload the V plane (quarter size)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width/2, height/2, 0, GL_RED, GL_UNSIGNED_BYTE, pv);
//point the sampler uniform at texture unit 2
glUniform1i(m_textureUniformV, 2);
glDrawArrays(GL_TRIANGLE_STRIP,0,4);
}
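Both render paths also assume the planes are tightly packed, i.e. that FFmpeg's linesize equals the frame width. Decoders often pad rows, so a more robust upload would tell OpenGL the real stride. A sketch for the Y plane, assuming a linesizeY value forwarded from frame->linesize[0] (the signals would need an extra parameter for this):
//Sketch: honour FFmpeg row padding when uploading the Y plane.
glPixelStorei(GL_UNPACK_ROW_LENGTH, linesizeY);   //linesizeY: assumed = frame->linesize[0]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, py);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);           //restore the default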
- The resizeGL() function
void VideoWidget::resizeGL(int width, int height)
{
glViewport(0, 0, (GLint)width, (GLint)height); //reset the viewport to the new widget size
}
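As written, resizeGL() stretches the video to fill the widget. To preserve the source aspect ratio instead, the viewport can be letterboxed, a sketch reusing the m_width/m_height members:
//Sketch: letterbox the viewport to keep the video aspect ratio.
void VideoWidget::resizeGL(int width, int height)
{
    if (m_width > 0 && m_height > 0)
    {
        int w = width, h = width * m_height / m_width;
        if (h > height) { h = height; w = height * m_width / m_height; }
        glViewport((width - w) / 2, (height - h) / 2, w, h);
    }
    else
    {
        glViewport(0, 0, width, height);   //no frame yet: fill the widget
    }
}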
- In the constructor, add connect() calls to receive the YUV video data and audio data signals sent from the decoder thread:
connect(ptr->pVideoThread, &VideoThread::emitMP4Nv12Data, this, &VideoWidget::recvNv12DataShow);
connect(ptr->pVideoThread, &VideoThread::emitMP4Yuv420pData, this, &VideoWidget::recvYuv420pDataShow);
connect(ptr->pVideoThread, &VideoThread::emitAudioData, this, &VideoWidget::DealemitAudioData);
connect(ptr->pVideoThread, &VideoThread::emitGetAudioFormat, this, &VideoWidget::DealemitAudioFormat);
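Assuming pVideoThread emits from its own thread, these connections default to Qt::QueuedConnection, so the slots run safely in the GUI thread. Spelling the type out makes that intent explicit, e.g.:
//Explicit queued connection (already the default for cross-thread signals):
connect(ptr->pVideoThread, &VideoThread::emitMP4Nv12Data,
        this, &VideoWidget::recvNv12DataShow, Qt::QueuedConnection);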
/*
 * function: receive NV12 data from ffmpeg+qsv/dxva2 hardware decoding of an H.265 stream
 * @param: py Y plane
 * @param: puv interleaved UV plane
 * @param: width
 * @param: height
 * @param: hw_type always 1 for this path (matches the m_type check in paintGL)
 */
void VideoWidget::recvNv12DataShow(uint8_t* py, uint8_t *puv, int width, int height, int hw_type)
{
m_py = py;
m_puv = puv;
m_width = width;
m_height = height;
m_type = hw_type;
update(); //schedules a repaint, which calls paintGL()
}
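Note that this slot only stores the raw plane pointers; because the connection is queued, the decoder may have reused or freed those buffers by the time paintGL() runs. A safer variant deep-copies the planes into widget-owned storage, a sketch assuming QByteArray members m_yBuf/m_uvBuf that are not in the original code:
//Sketch: copy the planes so paintGL() never reads recycled decoder memory.
void VideoWidget::recvNv12DataShow(uint8_t* py, uint8_t *puv, int width, int height, int hw_type)
{
    m_yBuf  = QByteArray((const char*)py,  width * height);     //Y plane: w*h bytes
    m_uvBuf = QByteArray((const char*)puv, width * height / 2); //UV plane: w*h/2 bytes
    m_py  = (uchar*)m_yBuf.data();
    m_puv = (uchar*)m_uvBuf.data();
    m_width = width;
    m_height = height;
    m_type = hw_type;
    update();
}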
/*
 * function: receive YUV420P data from ffmpeg software decoding
 * @param: py Y plane
 * @param: pu U plane
 * @param: pv V plane
 * @param: width
 * @param: height
 * @param: hw_type always 2 for this path
 */
void VideoWidget::recvYuv420pDataShow(uint8_t* py, uint8_t *pu, uint8_t *pv, int width, int height, int hw_type)
{
m_py = py;
m_pu = pu;
m_pv = pv;
m_width = width;
m_height = height;
m_type = hw_type;
update(); //schedules a repaint, which calls paintGL()
}
/*
 * function: handle decoded audio data
 * @param: data PCM samples
 * @param: size PCM buffer size in bytes
 */
void VideoWidget::DealemitAudioData(char *data, int size)
{
if(!audioDeviceOk)
return;
audioDevice->write(data, size);
}
/*
 * function: receive the audio format from the decoder thread and open the output device
 */
void VideoWidget::DealemitAudioFormat(int sampleRate, int sampleSize, int channelCount)
{
mp4_sampleRate = sampleRate;
mp4_sampleSize = sampleSize;
mp4_channelCount = channelCount;
initAudioDevice(mp4_sampleRate, mp4_sampleSize, mp4_channelCount);
}
/*
 * function: initialize the audio output device
 * @param: sampleRate sample rate, e.g. 16000
 * @param: sampleSize sample size in bits, e.g. 32 (float)
 * @param: channelCount number of channels, 1 = mono
 */
int VideoWidget::initAudioDevice(int sampleRate, int sampleSize, int channelCount)
{
QAudioFormat format;
format.setCodec("audio/pcm");
format.setSampleRate(sampleRate);
format.setSampleSize(sampleSize); //bits per sample; 32 matches QAudioFormat::Float below
format.setChannelCount(channelCount);
format.setByteOrder(QAudioFormat::LittleEndian);
format.setSampleType(QAudioFormat::Float);
QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice());
audioDeviceOk = info.isFormatSupported(format);
if(audioDeviceOk)
{
audioOutput = new QAudioOutput(format);
audioDevice = audioOutput->start();
connect(audioOutput, &QAudioOutput::stateChanged, this, &VideoWidget::DealAudioStateChanged);
}
else
{
return -1;
}
return 0;
}
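This constructor of QAudioOutput, together with setCodec() and setSampleSize(), is the Qt 5 multimedia API. If you build against Qt 6, the equivalent (a sketch, with the audioOutput member retyped to QAudioSink*) uses QAudioSink and the simplified QAudioFormat, where the sample size and codec are implied by the sample format:
//Qt 6 sketch: QAudioSink replaces QAudioOutput.
#include <QAudioSink>
#include <QAudioDevice>
#include <QMediaDevices>

QAudioFormat format;
format.setSampleRate(sampleRate);
format.setChannelCount(channelCount);
format.setSampleFormat(QAudioFormat::Float);      //32-bit float samples

const QAudioDevice dev = QMediaDevices::defaultAudioOutput();
audioDeviceOk = dev.isFormatSupported(format);
if (audioDeviceOk)
{
    audioOutput = new QAudioSink(dev, format);    //member type: QAudioSink*
    audioDevice = audioOutput->start();           //returns a QIODevice* to write PCM into
}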
2. FFmpeg decoding
Refer to the initialization code in the previous post and combine it with the decode routines below.
/*
 * function: decode one video packet
 * @param: dec decoder context
 * @param: pkt packet read from the MP4 file's video stream
 * @param: yuvframe destination for the converted frame: yuvj420 ----> yuv420p
 * @param: swsContext pixel-format conversion context
 * @return: 0 on success
 */
int VideoThread::decode_packet(AVCodecContext *dec, const AVPacket *pkt, AVFrame* yuvframe, SwsContext * swsContext)
{
int ret = 0;
// submit the packet to the decoder
ret = avcodec_send_packet(dec, pkt);
if (ret < 0)
{
qDebug()<<"record video avcodec_send_packet failed";
return ret;
}
while (ret >= 0)
{
ret = avcodec_receive_frame(dec, frame);
if (ret < 0)
{
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
return 0;
qDebug()<<"record video avcodec_receive_frame failed";
return ret;
}
yuvframe->width = frame->width;
yuvframe->height = frame->height;
AVFrame *tmpFrame = NULL;
if(yuv_pixfmt == AV_PIX_FMT_NV12)
{
//download the GPU surface into system memory; fall back to the raw frame on failure
if(av_hwframe_transfer_data(swFrame, frame, 0) < 0)
tmpFrame = frame;
else
tmpFrame = swFrame;
}
// hand the decoded frame to the render widget
if (dec->codec->type == AVMEDIA_TYPE_VIDEO)
{
if(yuv_pixfmt == AV_PIX_FMT_NV12)
{
emit emitMP4Nv12Data(tmpFrame->data[0], tmpFrame->data[1], frame->width, frame->height, 1);
}
else
{
sws_scale(swsContext,(const uint8_t *const *)frame->data, frame->linesize, 0,
frame->height,yuvframe->data,yuvframe->linesize);
emit emitMP4Yuv420pData(yuvframe->data[0], yuvframe->data[1], yuvframe->data[2], yuvframe->width, yuvframe->height, 2);
}
}
}
return 0;
}
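The swsContext and yuvframe parameters are expected to be prepared once, after the decoder is opened, as referenced from the previous post. A sketch of that one-time setup for the yuvj420 ----> yuv420p case (names are illustrative):
//Sketch: one-time setup of the conversion context and destination frame.
SwsContext *swsContext = sws_getContext(
        dec->width, dec->height, dec->pix_fmt,        //source geometry/format
        dec->width, dec->height, AV_PIX_FMT_YUV420P,  //destination format
        SWS_BILINEAR, NULL, NULL, NULL);

AVFrame *yuvframe = av_frame_alloc();
yuvframe->format = AV_PIX_FMT_YUV420P;
yuvframe->width  = dec->width;
yuvframe->height = dec->height;
av_frame_get_buffer(yuvframe, 0);                     //allocates data[]/linesize[]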
/*
 * function: decode one audio packet (legacy avcodec_decode_audio4 API; see the note after this function)
 */
int VideoThread::decode_packet(AVCodecContext *dec, const AVPacket *pkt, AVFrame* frame)
{
int got_frame;
int ret = avcodec_decode_audio4(dec, frame, &got_frame, pkt);
if (ret < 0) {
return ret;
}
if(got_frame)
{
size_t unpadded_linesize = frame->nb_samples * av_get_bytes_per_sample((enum AVSampleFormat)frame->format);
emit emitAudioData((char *)frame->extended_data[0], (int)unpadded_linesize);
}
return 0;
}
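avcodec_decode_audio4() has been deprecated since FFmpeg 3.1 and was removed in FFmpeg 5.0. A sketch of the equivalent using the send/receive API, mirroring the video path above (the name decode_audio_packet is illustrative, to avoid the overload):
//Sketch: audio decoding with the send/receive API (FFmpeg >= 3.1).
int VideoThread::decode_audio_packet(AVCodecContext *dec, const AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(dec, pkt);
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_frame(dec, frame)) >= 0)
    {
        size_t unpadded_linesize = frame->nb_samples *
                av_get_bytes_per_sample((enum AVSampleFormat)frame->format);
        //As in the original: only the first plane is forwarded, which is
        //complete for packed formats or mono planar audio.
        emit emitAudioData((char *)frame->extended_data[0], (int)unpadded_linesize);
    }
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;                                     //drained: not an error
    return ret;
}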