XPlay2.0 Player Project in Practice - 05

1 Class Diagram Analysis

Single responsibility: each class does one thing; decoding and demuxing are kept separate.

Open/closed principle: open for extension, closed for modification.

Liskov substitution: a derived class must be usable anywhere its base class is expected; it should not change the behaviour of methods the parent implements or break the parent's functionality.

Interface segregation: do not force users to depend on methods they do not need.

Dependency inversion: program against abstract interfaces rather than concrete implementation classes (a small sketch using the IVideoCall interface from later in this article follows the module list below).

Modules split out:

XDemux: demuxing

XDecode: decoding

XResample: audio resampling

XAudioPlay: audio playback

VideoWidget: video frame display

XSlider: seek slider
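As an illustration of the dependency-inversion point above: in this project the video thread depends only on an abstract display interface, not on the Qt widget. The IVideoCall header itself is not listed in this article, so the following is a minimal sketch reconstructed from the calls XVideoThread makes later (call->Init(...) and call->Repaint(...)); treat the exact signatures as assumptions.

//IVideoCall.h: sketch of the abstract display interface
#pragma once
struct AVFrame;
class IVideoCall
{
public:
	//prepare the display (textures, buffers) for the given frame size
	virtual void Init(int width, int height) = 0;
	//draw one decoded frame; the implementation is expected to free it
	virtual void Repaint(AVFrame *frame) = 0;
};
//XVideoWidget implements IVideoCall in addition to QOpenGLWidget, so
//XVideoThread::Open(para, IVideoCall *call, ...) never needs to know about Qt.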

2 Qt Project

Select the Qt modules:

Project -> Add Class:

A local variable declared static is shared rather than per-object: it effectively belongs to the class, not to a particular object.

Adding the static keyword in front of a local variable defines it as a static local variable. It can be used as a simple communication mechanism between objects.

Static local variables have the following characteristics:

● The variable is allocated in the global/static data area;

● A static local variable is initialized only once, the first time execution reaches its declaration; subsequent calls to the function do not re-initialize it;

● A static local variable is normally initialized at its declaration; if it is not explicitly initialized, it is automatically zero-initialized;

● It stays resident in the global/static data area until the program ends, but its scope is local: when the function or block that defines it returns, the name goes out of scope (the object itself remains in the static data area; there is simply no longer a name through which to access it);

Test code:

#include <iostream>

class Test
{
public:
  Test() {
    static bool is_first = true;
    static int num = 0;
    if (is_first) {
      std::cout << "is_first,num is " << num++ << std::endl;
      is_first = false;
    }
    else {
      std::cout << "is not first,num is" << num << std::endl;
    }

  }
};
int main()
{
  Test t1;
  Test t2;
}

Output:
is_first,num is 0
is not first,num is 1

XDemux

Open: open a media file and demux it

#pragma once
#include <mutex>
//接口类
struct AVFormatContext;
class XDemux
{
public:

  //打开媒体文件,或者流媒体 rtmp http rstp
  virtual bool Open(const char *url);

  XDemux();
  virtual ~XDemux();

public:
  //媒体总时长(毫秒)
  int totalMs = 0;
protected:
  std::mutex mux;

  //解封装上下文
  AVFormatContext *ic = NULL;

  //音视频索引,读取时区分音视频
  int videoStream = 0;
  int audioStream = 1;

};


#include "XDemux.h"
#include <iostream>
using namespace std;
extern "C" {
#include "libavformat/avformat.h"
}
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
static double r2d(AVRational r)
{
  return r.den == 0 ? 0 : (double)r.num / (double)r.den;
}

bool XDemux::Open(const char *url)
{
  //参数设置
  AVDictionary *opts = NULL;
  //设置rtsp流已tcp协议打开
  av_dict_set(&opts, "rtsp_transport", "tcp", 0);

  //网络延时时间
  av_dict_set(&opts, "max_delay", "500", 0);

  //上锁
  mux.lock();
  int re = avformat_open_input(
    &ic,
    url,
    0,  // 0表示自动选择解封器
    &opts //参数设置,比如rtsp的延时时间
  );
  if (re != 0)
  {
    mux.unlock();
    char buf[1024] = { 0 };
    av_strerror(re, buf, sizeof(buf) - 1);
    cout << "open " << url << " failed! :" << buf << endl;
    return false;
  }
  cout << "open " << url << " success! " << endl;

  //获取流信息 针对:This is useful for file formats with no headers such as MPEG
  re = avformat_find_stream_info(ic, 0);

  //总时长 毫秒
  totalMs = ic->duration / (AV_TIME_BASE / 1000);
  cout << "totalMs = " << totalMs << endl;

  //打印视频流详细信息
  av_dump_format(ic, 0, url, 0);


  //获取视频流
  videoStream = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
  AVStream *as = ic->streams[videoStream];

  cout << "=======================================================" << endl;
  cout << videoStream << "视频信息" << endl;
  cout << "codec_id = " << as->codecpar->codec_id << endl;
  cout << "format = " << as->codecpar->format << endl;
  cout << "width=" << as->codecpar->width << endl;
  cout << "height=" << as->codecpar->height << endl;
  //帧率 fps 分数转换
  cout << "video fps = " << r2d(as->avg_frame_rate) << endl;

  cout << "=======================================================" << endl;
  //获取音频流
  audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
  cout << audioStream << "音频信息" << endl;
  as = ic->streams[audioStream];
  cout << "codec_id = " << as->codecpar->codec_id << endl;
  cout << "format = " << as->codecpar->format << endl;
  cout << "sample_rate = " << as->codecpar->sample_rate << endl;
  //AVSampleFormat;
  cout << "channels = " << as->codecpar->channels << endl;
  //一帧数据?? 单通道样本数 
  cout << "frame_size = " << as->codecpar->frame_size << endl;
  //1024 * 2 * 2 = 4096  fps = sample_rate/frame_size
  mux.unlock();


  return true;
}

XDemux::XDemux()
{
  //保证只初始化一次,且线程安全
  static bool isFirst = true;
  static std::mutex dmux;
  dmux.lock();
  if (isFirst)
  {
    //初始化封装库
    av_register_all();

    //初始化网络库 (可以打开rtsp rtmp http 协议的流媒体视频)
    avformat_network_init();
    isFirst = false;
  }
  dmux.unlock();
}


XDemux::~XDemux()
{
}


Test:

#include "XPlay2.h"
#include "XDemux.h"

XPlay2::XPlay2(QWidget *parent)
    : QWidget(parent)
{
    ui.setupUi(this);

    XDemux demux;
    demux.Open("../../res/test.mp4");
}

Compile errors:

Output:

For the AAC codec

1024 samples make up one frame (one AAC frame = 1024 samples per channel). For 44.1 kHz AAC audio (44,100 samples per second), one frame therefore plays for 1024/44100 s ≈ 23.2 ms.

For the MP3 codec

For stereo MP3, 1152 samples make up one frame.
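A quick numeric check of these figures (standalone snippet, illustrative values only):

#include <iostream>

int main()
{
	//Illustrative values for 44.1 kHz AAC: 1024 samples per frame.
	int sample_rate = 44100;
	int frame_size = 1024;

	double frame_ms = 1000.0 * frame_size / sample_rate;  //~23.2 ms per audio frame
	double frame_fps = (double)sample_rate / frame_size;  //~43 audio frames per second

	std::cout << "frame duration = " << frame_ms << " ms" << std::endl;
	std::cout << "audio fps = " << frame_fps << std::endl;
	return 0;
}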
 

Read()


//空间需要调用者释放 ,释放AVPacket对象空间,和数据空间 av_packet_free
AVPacket *XDemux::Read()
{
  mux.lock();//加锁防止使用ic过程中被外部释放
  if (!ic) //容错
  {
    mux.unlock();
    return 0;
  }
  AVPacket *pkt = av_packet_alloc();
  //读取一帧,并分配空间
  int re = av_read_frame(ic, pkt);
  if (re != 0)
  {
    mux.unlock();
    av_packet_free(&pkt);
    return 0;
  }
  //pts转换为毫秒
  pkt->pts = pkt->pts*(1000 * (r2d(ic->streams[pkt->stream_index]->time_base)));
  pkt->dts = pkt->dts*(1000 * (r2d(ic->streams[pkt->stream_index]->time_base)));
  mux.unlock();
  cout << pkt->pts << " " << flush;
  return pkt;

}

CopyVPara / CopyAPara: get the video/audio codec parameters

The audio/video stream information of an AVStream lives in codecpar. If you hand out ic->streams[audioStream]->codecpar or ic->streams[videoStream]->codecpar directly, you run into trouble in a multithreaded setting: once the demuxing thread has released ic, the pointer the caller holds is dangling and can no longer be used. A copy therefore has to be made:

/**
 * Allocate a new AVCodecParameters and set its fields to default values
 * (unknown/invalid/0). The returned struct must be freed with
 * avcodec_parameters_free().
 */
AVCodecParameters *avcodec_parameters_alloc(void);

/**
 * Free an AVCodecParameters instance and everything associated with it and
 * write NULL to the supplied pointer.
 */
void avcodec_parameters_free(AVCodecParameters **par);

/**
 * Copy the contents of src to dst. Any allocated fields in dst are freed and
 * replaced with newly allocated duplicates of the corresponding fields in src.
 *
 * @return >= 0 on success, a negative AVERROR code on failure.
 */
int avcodec_parameters_copy(AVCodecParameters *dst, const AVCodecParameters *src);
     ......
     * Codec parameters associated with this stream. Allocated and freed by
     * libavformat in avformat_new_stream() and avformat_free_context()
     * respectively.
     *
     * - demuxing: filled by libavformat on stream creation or in
     *             avformat_find_stream_info()
     * - muxing: filled by the caller before avformat_write_header()
     */
    AVCodecParameters *codecpar;
} AVStream;
	//获取视频参数  返回的空间需要清理  avcodec_parameters_free
	AVCodecParameters *CopyVPara();
	
	//获取音频参数  返回的空间需要清理 avcodec_parameters_free
	AVCodecParameters *CopyAPara();
//获取视频参数  返回的空间需要清理  avcodec_parameters_free
AVCodecParameters *XDemux::CopyVPara()
{
	mux.lock();
	if (!ic)
	{
		mux.unlock();
		return NULL;
	}
	AVCodecParameters *pa = avcodec_parameters_alloc();
	avcodec_parameters_copy(pa, ic->streams[videoStream]->codecpar);
	mux.unlock();
	return pa;
}

//获取音频参数  返回的空间需要清理 avcodec_parameters_free
AVCodecParameters *XDemux::CopyAPara()
{
	mux.lock();
	if (!ic)
	{
		mux.unlock();
		return NULL;
	}
	AVCodecParameters *pa = avcodec_parameters_alloc();
	avcodec_parameters_copy(pa, ic->streams[audioStream]->codecpar);
	mux.unlock();
	return pa;
}

Seek(double pos): jump to a position

.h
  //seek 位置 pos 0.0 ~1.0
  virtual bool Seek(double pos);
.cpp

//seek 位置 pos 0.0 ~1.0
bool XDemux::Seek(double pos)
{
  mux.lock();
  if (!ic)
  {
    mux.unlock();
    return false;
  }
  //清理读取缓冲:防止粘包现象
  avformat_flush(ic);

  long long seekPos = 0;
  seekPos = ic->streams[videoStream]->duration * pos;//todo 如果duration 需要采用base_time ic->duration 这种方式
  int re = av_seek_frame(ic, videoStream, seekPos, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
  mux.unlock();
  if (re < 0) {
    return false;
  }
  return true;
}
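The TODO in the code above notes that ic->streams[videoStream]->duration is not always filled in by the demuxer; in that case the target has to be derived from ic->duration (expressed in AV_TIME_BASE units) and the stream's time_base. A minimal sketch of that fallback (an assumption, not the original implementation):

	//Fallback when the stream duration is unavailable (e.g. AV_NOPTS_VALUE or 0):
	//rescale the container duration (AV_TIME_BASE ticks) into the stream time_base.
	long long seekPos = 0;
	AVStream *vs = ic->streams[videoStream];
	if (vs->duration > 0)
		seekPos = vs->duration * pos;              //already in stream time_base units
	else
		seekPos = av_rescale_q((int64_t)(ic->duration * pos),
			AVRational{ 1, AV_TIME_BASE },         //source units: AV_TIME_BASE ticks
			vs->time_base);                        //target units: stream time_base
	int re = av_seek_frame(ic, videoStream, seekPos,
		AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);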

Test:

#include "XPlay2.h"
#include "XDemux.h"

XPlay2::XPlay2(QWidget *parent)
    : QWidget(parent)
{
    ui.setupUi(this);

    XDemux demux;
    demux.Open("../../res/test.mp4");
    demux.Seek(0.99);
    for (;;)
    {
        AVPacket * pkt =  demux.Read();
        if (!pkt) {
           break;
        }
    }
    
}

Clear / Close

	//清空读取缓存
	virtual void Clear();
	virtual void Close();


//清空读取缓存
void XDemux::Clear()
{
	mux.lock();
	if (!ic)
	{
		mux.unlock();
		return ;
	}
	//清理读取缓冲
	avformat_flush(ic);
	mux.unlock();
}
void XDemux::Close()
{
	mux.lock();
	if (!ic)
	{
		mux.unlock();
		return;
	}
	avformat_close_input(&ic);
	//媒体总时长(毫秒)
	totalMs = 0;
	mux.unlock();
}

In addition, Open() should clean up any previously opened state first, as sketched below.
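A minimal sketch of that cleanup: simply call the Close() shown above at the top of Open() before anything else.

bool XDemux::Open(const char *url)
{
	Close();	//release any previously opened context before re-opening

	//参数设置
	AVDictionary *opts = NULL;
	//...rest of Open() as listed earlier
}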

XDecode

Close / Clear / Open

#pragma once
struct AVCodecParameters;
struct AVCodecContext;
#include <mutex>
class XDecode
{
public:
	//打开解码器,不管成功与否都释放para空间
	virtual bool Open(AVCodecParameters *para);

	virtual void Close();
	virtual void Clear();

	XDecode();
	virtual ~XDecode();

public:
  bool isAudio = false;

protected:
	AVCodecContext *codec = 0;
	std::mutex mux;
};

#include "XDecode.h"
extern "C"
{
#include<libavcodec/avcodec.h>
}
#include <iostream>
using namespace std;
void XDecode::Close()
{
	mux.lock();
	if (codec)
	{
		avcodec_close(codec);
		avcodec_free_context(&codec);
	}
	mux.unlock();
}

void XDecode::Clear()
{
	mux.lock();
	//清理解码缓冲
	if (codec)
    // * Reset the internal decoder state / flush internal buffers. Should be called.when seeking or when switching to a different stream.
		avcodec_flush_buffers(codec);

	mux.unlock();
}

//打开解码器
bool XDecode::Open(AVCodecParameters *para)
{
	if (!para) return false;
	Close();
	//
	///解码器打开
	///找到解码器
	AVCodec *vcodec = avcodec_find_decoder(para->codec_id);
	if (!vcodec)
	{
		cout << "can't find the codec id " << para->codec_id << endl;
		avcodec_parameters_free(&para);
		return false;
	}
	cout << "find the AVCodec " << para->codec_id << endl;

	mux.lock();//加锁,防止一边open 一边close 同时对codec操作
	codec = avcodec_alloc_context3(vcodec);

	///配置解码器上下文参数
	avcodec_parameters_to_context(codec, para);
	avcodec_parameters_free(&para);

	//八线程解码
	codec->thread_count = 8;

	///打开解码器上下文
	int re = avcodec_open2(codec, 0, 0);
	if (re != 0)
	{
		avcodec_free_context(&codec);
		mux.unlock();
		char buf[1024] = { 0 };
		av_strerror(re, buf, sizeof(buf) - 1);
		cout << "avcodec_open2  failed! :" << buf << endl;
		return false;
	}
	mux.unlock();
	cout << " avcodec_open2 success!" << endl;
	return true;
}

XDecode::XDecode()
{
}


XDecode::~XDecode()
{
}

Test:

#include "XPlay2.h"
#include "XDemux.h"
#include "XDecode.h"
XPlay2::XPlay2(QWidget *parent)
  : QWidget(parent)
{
  ui.setupUi(this);

  XDemux demux;
  demux.Open("../../res/test.mp4");
  demux.Seek(0.99);

  XDecode decode;
  decode.Open(demux.CopyVPara());
  decode.Open(demux.CopyAPara());

  return;
  for (;;)
  {
    AVPacket * pkt = demux.Read();
    if (!pkt) {
      break;
    }
  }

}

Audio/video decoding: Send and Receive

	//发送到解码线程,不管成功与否都释放pkt空间(对象和媒体内容)
	virtual bool Send(AVPacket *pkt);

	//获取解码数据,一次send可能需要多次Recv,获取缓冲中的数据Send NULL在Recv多次
	//每次复制一份,由调用者释放 av_frame_free
	virtual AVFrame* Recv();

//发送到解码线程,不管成功与否都释放pkt空间(对象和媒体内容)
bool XDecode::Send(AVPacket *pkt)
{
	//容错处理 (free an invalid packet instead of leaking it)
	if (!pkt || pkt->size <= 0 || !pkt->data)
	{
		if (pkt) av_packet_free(&pkt);
		return false;
	}
	mux.lock();
	if (!codec)
	{
		mux.unlock();
		return false;
	}
	int re = avcodec_send_packet(codec, pkt);
	mux.unlock();
	av_packet_free(&pkt);
	if (re != 0)return false;
	return true;
}

//获取解码数据,一次send可能需要多次Recv,获取缓冲中的数据Send NULL在Recv多次
//每次复制一份,由调用者释放 av_frame_free
AVFrame* XDecode::Recv()
{
	mux.lock();
	if (!codec)
	{
		mux.unlock();
		return NULL;
	}
	AVFrame *frame = av_frame_alloc();
	int re = avcodec_receive_frame(codec, frame);
	mux.unlock();
	if (re != 0)
	{
		av_frame_free(&frame);
		return NULL;
	}
	cout << "["<<frame->linesize[0] << "] " << flush;
	return frame;
}
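The comment above says the decoder's buffered frames are drained by sending NULL and then calling Recv repeatedly, but Send() as written rejects a NULL packet. A minimal sketch of how such a drain could look (a hypothetical helper, assuming a matching declaration is added to XDecode and <vector> is included; not part of the original class):

//Hypothetical helper: enter draining mode and collect the remaining buffered frames.
void XDecode::Drain(std::vector<AVFrame *> &out)
{
	mux.lock();
	if (!codec)
	{
		mux.unlock();
		return;
	}
	avcodec_send_packet(codec, NULL);		//a NULL packet starts draining
	for (;;)
	{
		AVFrame *frame = av_frame_alloc();
		if (avcodec_receive_frame(codec, frame) != 0)	//AVERROR_EOF once fully drained
		{
			av_frame_free(&frame);
			break;
		}
		out.push_back(frame);			//caller frees with av_frame_free
	}
	//after draining, avcodec_flush_buffers() (see Clear above) is required before decoding again
	mux.unlock();
}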

At the same time, add an interface to XDemux that tells whether an AVPacket is audio:

	virtual bool IsAudio(AVPacket *pkt);

bool XDemux::IsAudio(AVPacket *pkt)
{
	if (!pkt) return false;
	if (pkt->stream_index == videoStream)
		return false;
	return true;

}

Test code:

#include "XPlay2.h"
#include "XDemux.h"
#include "XDecode.h"
#include <iostream>
using namespace std;
XPlay2::XPlay2(QWidget *parent)
  : QWidget(parent)
{
  ui.setupUi(this);

  XDemux demux;
  demux.Open("../../res/test.mp4");
  demux.Seek(0.99);

  //////////////////////////////////////////////
  ///解码测试
  XDecode vdecode;
  cout << "vdecode.Open() = " << vdecode.Open(demux.CopyVPara()) << endl;
  //vdecode.Clear();
  //vdecode.Close();
  XDecode adecode;
  cout << "adecode.Open() = " << adecode.Open(demux.CopyAPara()) << endl;

  for (;;)
  {
    AVPacket *pkt = demux.Read();
    if (demux.IsAudio(pkt))
    {
      adecode.Send(pkt);
      AVFrame *frame = adecode.Recv();
      cout << "Audio:" << frame << endl;
    }
    else
    {
      vdecode.Send(pkt);
      AVFrame *frame = vdecode.Recv();
      cout << "Video:" << frame << endl;
    }
    if (!pkt)break;
  }


}

XVideoWidget

In XPlay2.ui, add an OpenGL widget and promote it to XVideoWidget.

XVideoWidget

#pragma once

#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QGLShaderProgram>
#include <mutex>
struct AVFrame;
class XVideoWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
	Q_OBJECT

public:
	void Init(int width, int height);

	//不管成功与否都释放frame空间
	virtual void Repaint(AVFrame *frame);

	XVideoWidget(QWidget *parent);
	~XVideoWidget();
protected:
	//刷新显示
	void paintGL();

	//初始化gl
	void initializeGL();

	// 窗口尺寸变化
	void resizeGL(int width, int height);
private:
	std::mutex mux;

	//shader程序
	QGLShaderProgram program;

	//shader中yuv变量地址
	GLuint unis[3] = { 0 };
	//openg的 texture地址
	GLuint texs[3] = { 0 };

	//材质内存空间
	unsigned char *datas[3] = { 0 };

	int width = 240;
	int height = 128;

};

Compared with the earlier version, reading YUV from a file has been removed, and Init/Repaint have been added to draw the decoded video.


#include "XVideoWidget.h"
#include <QDebug>
#include <QTimer>
extern "C" {
#include <libavutil/frame.h>
}
//自动加双引号
#define GET_STR(x) #x
#define A_VER 3
#define T_VER 4

FILE *fp = NULL;

//顶点shader
const char *vString = GET_STR(
  attribute vec4 vertexIn;
  attribute vec2 textureIn;
  varying vec2 textureOut;
void main(void)
{
  gl_Position = vertexIn;
  textureOut = textureIn;
}
);


//片元shader
const char *tString = GET_STR(
  varying vec2 textureOut;
  uniform sampler2D tex_y;
  uniform sampler2D tex_u;
  uniform sampler2D tex_v;
void main(void)
{
  vec3 yuv;
  vec3 rgb;
  yuv.x = texture2D(tex_y, textureOut).r;
  yuv.y = texture2D(tex_u, textureOut).r - 0.5;
  yuv.z = texture2D(tex_v, textureOut).r - 0.5;
  rgb = mat3(1.0, 1.0, 1.0,
    0.0, -0.39465, 2.03211,
    1.13983, -0.58060, 0.0) * yuv;
  gl_FragColor = vec4(rgb, 1.0);
}

);


XVideoWidget::XVideoWidget(QWidget *parent)
  : QOpenGLWidget(parent)
{
}

XVideoWidget::~XVideoWidget()
{
}

void XVideoWidget::Repaint(AVFrame *frame)
{
  if (!frame)return;
  mux.lock();
  //容错,保证尺寸正确
  if (!datas[0] || width * height == 0 || frame->width != this->width || frame->height != this->height)
  {
    av_frame_free(&frame);
    mux.unlock();
    return;
  }
  memcpy(datas[0], frame->data[0], width*height);
  memcpy(datas[1], frame->data[1], width*height / 4);
  memcpy(datas[2], frame->data[2], width*height / 4);
  //行对齐问题
  mux.unlock();

  //刷新显示
  update();
}
void XVideoWidget::Init(int width, int height)
{
  mux.lock();//线程安全,防止多线程使用datas
  this->width = width;
  this->height = height;
  delete []datas[0];
  delete []datas[1];
  delete []datas[2];
  ///分配材质内存空间
  datas[0] = new unsigned char[width*height];		//Y
  datas[1] = new unsigned char[width*height / 4];	//U
  datas[2] = new unsigned char[width*height / 4];	//V


  if (texs[0])
  {
    glDeleteTextures(3, texs);//宽高变化后销毁之前的材质
  }

  //创建材质:与宽高相关
  glGenTextures(3, texs);

  //Y
  glBindTexture(GL_TEXTURE_2D, texs[0]);
  //放大过滤,线性插值   GL_NEAREST(效率高,但马赛克严重)
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  //创建材质显卡空间
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, 0);

  //U
  glBindTexture(GL_TEXTURE_2D, texs[1]);
  //放大过滤,线性插值
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  //创建材质显卡空间
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width / 2, height / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);

  //V
  glBindTexture(GL_TEXTURE_2D, texs[2]);
  //放大过滤,线性插值
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  //创建材质显卡空间
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width / 2, height / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);


  mux.unlock();


}
//初始化opengl
void XVideoWidget::initializeGL()
{
  qDebug() << "initializeGL";
  mux.lock();
  //初始化opengl (QOpenGLFunctions继承)函数 
  initializeOpenGLFunctions();

  //program加载shader(顶点和片元)脚本
  //片元(像素)
  qDebug() << program.addShaderFromSourceCode(QGLShader::Fragment, tString);
  //顶点shader
  qDebug() << program.addShaderFromSourceCode(QGLShader::Vertex, vString);

  //设置顶点坐标的变量
  program.bindAttributeLocation("vertexIn", A_VER);

  //设置材质坐标
  program.bindAttributeLocation("textureIn", T_VER);

  //编译shader
  qDebug() << "program.link() = " << program.link();

  qDebug() << "program.bind() = " << program.bind();

  //传递顶点和材质坐标
  //顶点
  static const GLfloat ver[] = {
    -1.0f,-1.0f,
    1.0f,-1.0f,
    -1.0f, 1.0f,
    1.0f,1.0f
  };

  //材质
  static const GLfloat tex[] = {
    0.0f, 1.0f,
    1.0f, 1.0f,
    0.0f, 0.0f,
    1.0f, 0.0f
  };

  //顶点
  glVertexAttribPointer(A_VER, 2, GL_FLOAT, 0, 0, ver);
  glEnableVertexAttribArray(A_VER);

  //材质
  glVertexAttribPointer(T_VER, 2, GL_FLOAT, 0, 0, tex);
  glEnableVertexAttribArray(T_VER);


  //从shader获取材质
  unis[0] = program.uniformLocation("tex_y");
  unis[1] = program.uniformLocation("tex_u");
  unis[2] = program.uniformLocation("tex_v");

  mux.unlock();

  //fp = fopen("out240x128.yuv", "rb");
  //if (!fp)
  //{
  //	qDebug() << "out240x128.yuv file open failed!";
  //}


  //启动定时器
  //QTimer *ti = new QTimer(this);
  //connect(ti, SIGNAL(timeout()), this, SLOT(update()));
  //ti->start(40);
}

//刷新显示
void XVideoWidget::paintGL()
{
  //if (feof(fp))
  //{
  //	fseek(fp, 0, SEEK_SET);
  //}
  //fread(datas[0], 1, width*height, fp);
  //fread(datas[1], 1, width*height / 4, fp);
  //fread(datas[2], 1, width*height / 4, fp);
  mux.lock();
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, texs[0]); //0层绑定到Y材质
                       //修改材质内容(复制内存内容)
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RED, GL_UNSIGNED_BYTE, datas[0]);
  //与shader uni遍历关联
  glUniform1i(unis[0], 0);


  glActiveTexture(GL_TEXTURE0 + 1);
  glBindTexture(GL_TEXTURE_2D, texs[1]); //1层绑定到U材质
                       //修改材质内容(复制内存内容)
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_RED, GL_UNSIGNED_BYTE, datas[1]);
  //与shader uni遍历关联
  glUniform1i(unis[1], 1);


  glActiveTexture(GL_TEXTURE0 + 2);
  glBindTexture(GL_TEXTURE_2D, texs[2]); //2层绑定到V材质
                       //修改材质内容(复制内存内容)
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_RED, GL_UNSIGNED_BYTE, datas[2]);
  //与shader uni遍历关联
  glUniform1i(unis[2], 2);

  glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  qDebug() << "paintGL";
  mux.unlock();
}


// 窗口尺寸变化
void XVideoWidget::resizeGL(int width, int height)
{
  mux.lock();
  qDebug() << "resizeGL " << width << ":" << height;
  mux.unlock();
}

XResample

#pragma once
struct AVCodecParameters;
struct AVFrame;
struct SwrContext;
#include <mutex>
class XResample
{
public:

	//输出参数和输入参数一致除了采样格式,输出为S16 ,会释放para
  virtual bool Open(AVCodecParameters *para, bool isClearPara = false);
	virtual void Close();

	//返回重采样后大小,不管成功与否都释放indata空间
	virtual int Resample(AVFrame *indata, unsigned char *data);
	XResample();
	~XResample();

	//AV_SAMPLE_FMT_S16
	int outFormat = 1;
protected:
	std::mutex mux;
	SwrContext *actx = 0;
};

#include "XResample.h"
extern "C" {
#include <libswresample/swresample.h>
#include <libavcodec/avcodec.h>
}
#pragma comment(lib,"swresample.lib")
#include <iostream>
using namespace std;

void XResample::Close()
{
	mux.lock();
	if (actx)
		swr_free(&actx);

	mux.unlock();
}

//输出参数和输入参数一致除了采样格式,输出为S16
bool XResample::Open(AVCodecParameters *para, bool isClearPara)
{
	if (!para)return false;
	mux.lock();
	//音频重采样 上下文初始化
	//if(!actx)
	//	actx = swr_alloc();

	//如果actx为NULL会分配空间
	actx = swr_alloc_set_opts(actx,
		av_get_default_channel_layout(2),	//输出格式
		(AVSampleFormat)outFormat,			//输出样本格式 1 AV_SAMPLE_FMT_S16
		para->sample_rate,					//输出采样率
		av_get_default_channel_layout(para->channels),//输入格式
		(AVSampleFormat)para->format,
		para->sample_rate,
		0, 0
	);

  if (isClearPara) {
    avcodec_parameters_free(&para);
  }

	int re = swr_init(actx);
	mux.unlock();
	if (re != 0)
	{
		char buf[1024] = { 0 };
		av_strerror(re, buf, sizeof(buf) - 1);
		cout << "swr_init  failed! :" << buf << endl;
		return false;
	}
	//unsigned char *pcm = NULL;
	return true;
}

//返回重采样后大小,不管成功与否都释放indata空间
int XResample::Resample(AVFrame *indata, unsigned char *d)
{
	if (!indata) return 0;
	if (!d)
	{
		av_frame_free(&indata);
		return 0;
	}
	uint8_t *data[2] = { 0 };
	data[0] = d;
	int re = swr_convert(actx,
		data, indata->nb_samples,		//输出
		(const uint8_t**)indata->data, indata->nb_samples	//输入
	);
	if (re <= 0)return re;
	int outSize = re * indata->channels * av_get_bytes_per_sample((AVSampleFormat)outFormat);
	return outSize;
}
XResample::XResample()
{
}


XResample::~XResample()
{
}

XAudioPlay

Singleton pattern, factory pattern.

#pragma once
class XAudioPlay
{
public:
	//打开音频播放
	virtual bool Open() = 0;
	virtual void Close() = 0;

	//播放音频
	virtual bool Write(const unsigned char *data, int datasize) = 0;
	virtual int GetFree() = 0;

  //获取采样率 采样位数 通道数
  int getSampleRate()const { return sampleRate; }
  int getSampleSize()const { return sampleSize; }
  int getChannels() const { return channels; }

  //设置采样率 采样位数 通道数
  void setSampleRate(int sampleRate = 44100) {
    this->sampleRate = sampleRate;
  }
  void setSampleSize(int sampleSize = 16) {
    this->sampleSize = sampleSize;
  }
  void setChannels(int channels = 2) {
    this->channels = channels;
  }


	static XAudioPlay *Get();
	XAudioPlay();
	virtual ~XAudioPlay();
protected:
  int sampleRate = 44100;
  int sampleSize = 16;
  int channels = 2;
};

#include "XAudioPlay.h"
#include <QAudioFormat>
#include <QAudioOutput>
#include <mutex>
class CXAudioPlay :public XAudioPlay
{
public:
	QAudioOutput *output = NULL;
	QIODevice *io = NULL;
	std::mutex mux;
	virtual void Close()
	{
		mux.lock();
		if (io)
		{
			io->close ();
			io = NULL;
		}
		if (output)
		{
			output->stop();
			delete output;
			output = 0;
		}
		mux.unlock();
	}
	virtual bool Open()
	{
		Close();
		QAudioFormat fmt;
		fmt.setSampleRate(sampleRate);
		fmt.setSampleSize(sampleSize);
		fmt.setChannelCount(channels);
		fmt.setCodec("audio/pcm");
		fmt.setByteOrder(QAudioFormat::LittleEndian);
		fmt.setSampleType(QAudioFormat::SignedInt);	//AV_SAMPLE_FMT_S16 is signed 16-bit PCM
		mux.lock();
    if (output)
    {
      output->stop();
      delete output;
      output = NULL;
    }
		output = new QAudioOutput(fmt);
		io = output->start(); //开始播放
		mux.unlock();
		if(io)
			return true;
		return false;
	}
	virtual bool Write(const unsigned char *data, int datasize)
	{
		if (!data || datasize <= 0)return false;
		mux.lock();
		if (!output || !io)
		{
			mux.unlock();
			return false;
		}
		int size = io->write((char *)data, datasize);
		mux.unlock();
		if (datasize != size)
			return false;
		return true;
	}

	virtual int GetFree()
	{
		mux.lock();
		if (!output)
		{
			mux.unlock();
			return 0;
		}
		int free = output->bytesFree();
		mux.unlock();
		return free;
	}
};
XAudioPlay *XAudioPlay::Get()
{
	static CXAudioPlay play;
	return &play;
}

XAudioPlay::XAudioPlay()
{
}


XAudioPlay::~XAudioPlay()
{
}

XAudioThread
 

#pragma once
#include <QThread>
#include <mutex>
#include <list>
struct AVCodecParameters;
class XDecode;
class XAudioPlay;
class XResample;
struct AVPacket;
class XAudioThread:public QThread
{
public:

	//打开,不管成功与否都清理
	virtual bool Open(AVCodecParameters *para,int sampleRate,int channels);
	virtual void Push(AVPacket *pkt);
	void run();
	XAudioThread();
	virtual ~XAudioThread();
	
  //设置最大队列数量
  void setMaxList(int maxList = 100) { this->maxList = maxList; }
  //停止线程
  void stopWork() { isExit = true; }

private:

	std::list <AVPacket *> packs;
	std::mutex mux;
	XDecode *decode = 0;
	XAudioPlay *ap = 0;
	XResample *res = 0;

  //最大队列
  int maxList = 100;
  bool isExit = false;
};

#include "XAudioThread.h"
#include "XDecode.h"
#include "XAudioPlay.h"
#include "XResample.h"
#include <iostream>
using namespace std;

void XAudioThread::Push(AVPacket *pkt)
{
	if (!pkt)return; 
	//阻塞
	while (!isExit)
	{
		mux.lock();
		if (packs.size() < maxList)
		{
			packs.push_back(pkt);
			mux.unlock();
			break;
		}
		mux.unlock();
		msleep(1);
	}
}

bool XAudioThread::Open(AVCodecParameters *para,int sampleRate, int channels)
{
	if (!para)return false;
	mux.lock();
	if (!decode) decode = new XDecode();
	if (!res) res = new XResample();
	if (!ap) ap = XAudioPlay::Get();
	bool re = true;

  //由于 XDecode XResample open都会对para进行清理,在XResample open增加第二个参数false不清理,由XDecode在后面清理
	if (!res->Open(para, false))
	{
		cout << "XResample open failed!" << endl;
		re = false;
	}

  ap->setChannels(channels);
  ap->setSampleRate(sampleRate);
	if (!ap->Open())
	{
		re = false;
		cout << "XAudioPlay open failed!" << endl;
	}
	if (!decode->Open(para))
	{
		cout << "audio XDecode open failed!" << endl;
		re = false;
	}
	mux.unlock();
	cout << "XAudioThread::Open :" << re << endl;
	return re;
}
void XAudioThread::run()
{
	unsigned char *pcm = new unsigned char[1024 * 1024 * 10];
	while (!isExit)
	{
		mux.lock();

		//没有数据
		if (packs.empty() || !decode || !res || !ap)
		{
			mux.unlock();
			msleep(1);
			continue;
		}

		AVPacket *pkt = packs.front();
		packs.pop_front();
		bool re = decode->Send(pkt);
		if (!re)
		{
			mux.unlock();
			msleep(1);
			continue;
		}
		//一次send 多次recv
		while (!isExit)
		{
			AVFrame * frame = decode->Recv();
			if (!frame) break;
			//重采样 
			int size = res->Resample(frame, pcm);
			//播放音频
			while (!isExit)
			{
				if (size <= 0)break;
				//缓冲未播完,空间不够
				if (ap->GetFree() < size)
				{
					msleep(1);
					continue;
				}
				ap->Write(pcm, size);
				break;
			}
		}
		mux.unlock();
	}
	delete []pcm;
}

XAudioThread::XAudioThread()
{
}


XAudioThread::~XAudioThread()
{
	//等待线程退出
	isExit = true;
	wait();
}

XVideoThread

#pragma once

///解码和显示视频
struct AVPacket;
struct AVCodecParameters;
class XDecode;
#include <list>
#include <mutex>
#include <QThread>
#include "IVideoCall.h"
class XVideoThread:public QThread
{
public:
	//打开,不管成功与否都清理
	virtual bool Open(AVCodecParameters *para,IVideoCall *call,int width,int height);
	virtual void Push(AVPacket *pkt);
	void run();

	XVideoThread();
	virtual ~XVideoThread();

protected:
	std::list <AVPacket *> packs;
	std::mutex mux;
	XDecode *decode = 0;
	IVideoCall *call = 0;
  //最大队列
  int maxList = 100;
  bool isExit = false;

};

#include "XVideoThread.h"
#include "XDecode.h"
#include <iostream>
using namespace std;
//打开,不管成功与否都清理
bool XVideoThread::Open(AVCodecParameters *para, IVideoCall *call,int width,int height)
{
	if (!para)return false;
	mux.lock();

	//初始化显示窗口
	this->call = call;
	if (call)
	{
		call->Init(width, height);
	}

	//打开解码器
	if (!decode) decode = new XDecode();
	bool re = true;
	if (!decode->Open(para))
	{
		cout << "video XDecode open failed!" << endl;
		re = false;
	}
	mux.unlock();
	cout << "XAudioThread::Open :" << re << endl;
	return re;
}

void XVideoThread::Push(AVPacket *pkt)
{
	if (!pkt)return;
	//阻塞
	while (!isExit)
	{
		mux.lock();
		if (packs.size() < maxList)
		{
			packs.push_back(pkt);
			mux.unlock();
			break;
		}
		mux.unlock();
		msleep(1);
	}
}

void XVideoThread::run()
{
	while (!isExit)
	{
		mux.lock();

		//没有数据
		if (packs.empty() || !decode)
		{
			mux.unlock();
			msleep(1);
			continue;
		}

		AVPacket *pkt = packs.front();
		packs.pop_front();
		bool re = decode->Send(pkt);
		if (!re)
		{
			mux.unlock();
			msleep(1);
			continue;
		}
		//一次send 多次recv
		while (!isExit)
		{
			AVFrame * frame = decode->Recv();
			if (!frame) break;
			//显示视频
			if (call)
			{
				call->Repaint(frame);
			}

		}
		mux.unlock();
	}
}

XVideoThread::XVideoThread()
{
}


XVideoThread::~XVideoThread()
{
	//等待线程退出
	isExit = true;
	wait();
}

XDemuxThread

#pragma once
#include <QThread>
#include "IVideoCall.h"
#include <mutex>
class XDemux;
class XVideoThread;
class XAudioThread;
class XDemuxThread:public QThread
{
public:
	//创建对象并打开
	virtual bool Open(const char *url, IVideoCall *call);

	//启动所有线程
	virtual void Start();

	void run();
	XDemuxThread();
	virtual ~XDemuxThread();
	bool isExit = false;
protected:
	std::mutex mux;
	XDemux *demux = 0;
	XVideoThread *vt = 0;
	XAudioThread *at = 0;
};

#include "XDemuxThread.h"
#include "XDemux.h"
#include "XVideoThread.h"
#include "XAudioThread.h"
#include <iostream>
using namespace std;
void XDemuxThread::run()
{
	while (!isExit)
	{
		mux.lock();
		if (!demux)
		{
			mux.unlock();
			msleep(5);
			continue;
		}
		AVPacket *pkt = demux->Read();
		if (!pkt) 
		{
			mux.unlock();
			msleep(5);
			continue;
		}
		//判断数据是音频
		if (demux->IsAudio(pkt))
		{
			if(at)at->Push(pkt);
		}
		else //视频
		{
			if (vt)vt->Push(pkt);
		}

		mux.unlock();

	}
}


bool XDemuxThread::Open(const char *url, IVideoCall *call)
{
	if (url == 0 || url[0] == '\0')
		return false;

	mux.lock();
	if (!demux) demux = new XDemux();
	if (!vt) vt = new XVideoThread();
	if (!at) at = new XAudioThread();

	//打开解封装
	bool re = demux->Open(url);
	if (!re)
	{
		mux.unlock();
		cout << "demux->Open(url) failed!" << endl;
		return false;
	}
	//打开视频解码器和处理线程
	if (!vt->Open(demux->CopyVPara(), call, demux->getVideoWidth(), demux->getVideHeight()))
	{
		re = false;
		cout << "vt->Open failed!" << endl;
	}
	//打开音频解码器和处理线程
	if (!at->Open(demux->CopyAPara(), demux->getAudioSampleRate(), demux->getAudioChannels()))
	{
		re = false;
		cout << "at->Open failed!" << endl;
	}
	mux.unlock();
	cout << "XDemuxThread::Open " << re << endl;
	return re;
}
//启动所有线程
void XDemuxThread::Start()
{
	mux.lock();
	//启动当前线程
	QThread::start();
	if (vt)vt->start();
	if (at)at->start();
	mux.unlock();
}
XDemuxThread::XDemuxThread()
{
}


XDemuxThread::~XDemuxThread()
{
	isExit = true;
	wait();
}
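XDemuxThread::Open above calls demux->getVideoWidth(), demux->getVideHeight(), demux->getAudioSampleRate() and demux->getAudioChannels(), none of which appear in the XDemux listing earlier. A minimal sketch of how those accessors could be backed by members filled in while XDemux::Open() holds the lock (only the four getter names come from the code above; everything else is an assumption):

//Additions to XDemux.h (sketch)
public:
	int getVideoWidth() const { return width; }
	int getVideHeight() const { return height; }
	int getAudioSampleRate() const { return sampleRate; }
	int getAudioChannels() const { return channels; }
protected:
	int width = 0;		//video width, set in Open()
	int height = 0;		//video height, set in Open()
	int sampleRate = 0;	//audio sample rate, set in Open()
	int channels = 0;	//audio channel count, set in Open()

//Inside XDemux::Open(), after the streams have been found (sketch):
//	width = ic->streams[videoStream]->codecpar->width;
//	height = ic->streams[videoStream]->codecpar->height;
//	sampleRate = ic->streams[audioStream]->codecpar->sample_rate;
//	channels = ic->streams[audioStream]->codecpar->channels;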

Test:

#include "XPlay2.h"
#include <QtWidgets/QApplication>
#include <iostream>

#include "XDemuxThread.h"
using namespace std;

int main(int argc, char *argv[])
{
  QApplication a(argc, argv);
  XPlay2 w;
  w.show();

  XDemuxThread dt;
  dt.Open("../../res/test.mp4", w.ui.video);
  dt.Start();


  return a.exec();
}

Project: https://download.csdn.net/download/LIJIWEI0611/18383156

Audio/video synchronization: syncing video to audio

For a typical player, the basic pipeline can be divided into the following stages:
data input (network/local) -> demuxing -> audio/video decoding -> audio/video synchronization -> audio/video output.
The basic framework is shown in the figure below:

(Figure: basic block diagram of a player)

Why is audio/video synchronization needed?
After demuxing, audio and video decoding are independent, and so is their playback. Each stream carries the information that determines its own playback speed:

Video: the frame rate, i.e. how many frames are displayed per second.
Audio: the sample rate, i.e. how many samples are played per second.

From the frame rate and the sample rate, the playback speed of each stream is known. The sound card and the graphics card both consume data one frame at a time, so if playback relied purely on the frame rate and the sample rate, the two streams would, under ideal conditions, stay in sync with no drift.
Take a 44.1 kHz AAC audio stream and a 24 FPS video stream as an example: one AAC frame contains 1024 samples per channel, so one frame lasts (1024/44100) x 1000 ms = 23.22 ms; one video frame lasts 1000 ms / 24 = 41.67 ms. Ideally audio and video stay perfectly in sync, as illustrated in the figure below:

In practice, however, this simple approach slowly drifts out of sync: either the video gets ahead or the audio does. Possible causes:

The duration of one frame is hard to control precisely. Decoding and rendering times differ, so each frame may come out with a tiny offset that accumulates until the drift becomes obvious (for example, a performance-limited machine may need 42 ms to output one frame).

Audio output is linear, while video output may be non-linear, which also introduces drift.

The stream itself may carry an audio/video offset (especially live TS streams, where the first playable audio frame and video frame do not start at the same point).

To solve the synchronization problem, timestamps are therefore introduced.

Common synchronization strategies
As stated above, to keep audio and video in sync a reference clock is chosen at playback time; the timestamp on each frame is read and playback is adjusted dynamically against that clock. The timestamp is the PTS, and the reference clock is usually chosen in one of three ways:

  • Sync video to audio: the audio playback speed is the baseline and the video follows it.
  • Sync audio to video: the video playback speed is the baseline and the audio follows it.
  • Sync both to an external clock: an external clock is the baseline and both audio and video follow it.

When a stream falls behind the reference clock, speed it up or drop data; when it runs ahead, delay it.

These are the basic strategies. Because people are more sensitive to sound than to picture, frequently adjusting the audio gives a poor experience, and the audio clock grows linearly, so the audio clock is normally chosen as the reference and the video is synced to the audio.

Logic:
The pts of each frame is recorded in XDecode::Recv() (the pts member added to XDecode, together with the other sync members, is sketched after the video thread below):
AVFrame* XDecode::Recv()
{
	mux.lock();
	if (!codec)
	{
		mux.unlock();
		return NULL;
	}
	AVFrame *frame = av_frame_alloc();
	int re = avcodec_receive_frame(codec, frame);
	mux.unlock();
	if (re != 0)
	{
		av_frame_free(&frame);
		return NULL;
	}
	//cout << "["<<frame->linesize[0] << "] " << flush;
	pts = frame->pts;
	return frame;
}


In the audio thread:
void XAudioThread::run()
{
	unsigned char *pcm = new unsigned char[1024 * 1024 * 10];
	while (!isExit)
	{
		mux.lock();

		//没有数据
		if (packs.empty() || !decode || !res || !ap)
		{
			mux.unlock();
			msleep(1);
			continue;
		}

		AVPacket *pkt = packs.front();
		packs.pop_front();
		bool re = decode->Send(pkt);
		if (!re)
		{
			mux.unlock();
			msleep(1);
			continue;
		}
		//一次send 多次recv
		while (!isExit)
		{
			AVFrame * frame = decode->Recv();
			if (!frame) break;

			//减去缓冲中未播放的时间
			pts = decode->pts - ap->GetNoPlayMs();//当前的时钟应该是该帧的时间-qtbuffer中还没播放的音频帧占用的时间;

			//cout << "audio pts = " << pts << endl;

			//重采样 
			int size = res->Resample(frame, pcm);
			//播放音频
			while (!isExit)
			{
				if (size <= 0)break;
				//缓冲未播完,空间不够
				if (ap->GetFree() < size)
				{
					msleep(1);
					continue;
				}
				ap->Write(pcm, size);
				break;
			}
		}
		mux.unlock();
	}
	delete []pcm;
}
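ap->GetNoPlayMs() used above does not appear in the XAudioPlay listing earlier; it returns how many milliseconds of audio have been written to the output but not yet played. A minimal sketch of how it could be implemented in the QAudioOutput-based CXAudioPlay (assuming a matching pure virtual is added to XAudioPlay; this is a reconstruction, not the original code):

	//Sketch: milliseconds of audio queued in QAudioOutput but not yet played.
	virtual long long GetNoPlayMs()
	{
		mux.lock();
		if (!output)
		{
			mux.unlock();
			return 0;
		}
		//bytes still queued in the device buffer = total buffer size - free space
		long long bufferedBytes = output->bufferSize() - output->bytesFree();
		//bytes per second of PCM = sampleRate * (sampleSize / 8) * channels
		long long bytesPerSec = (long long)sampleRate * (sampleSize / 8) * channels;
		long long ms = 0;
		if (bytesPerSec > 0)
			ms = bufferedBytes * 1000 / bytesPerSec;
		mux.unlock();
		return ms;
	}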
In the XDemuxThread loop, pass the audio thread's current pts to the video playback thread:
void XDemuxThread::run()
{
	while (!isExit)
	{
		mux.lock();
		if (!demux)
		{
			mux.unlock();
			msleep(5);
			continue;
		}

    //音视频同步
    if (vt && at)
    {
      vt->setSynPts(at->getPts());//赋值
    }

		AVPacket *pkt = demux->Read();
		if (!pkt) 
		{
			mux.unlock();
			msleep(5);
			continue;
		}
		//判断数据是音频
		if (demux->IsAudio(pkt))
		{
			if(at)at->Push(pkt);
		}
		else //视频
		{
			if (vt)vt->Push(pkt);
		}

		mux.unlock();

	}
}

Syncing video to audio:

void XVideoThread::run()
{
	while (!isExit)
	{
		mux.lock();

		//没有数据
		if (packs.empty() || !decode)
		{
			mux.unlock();
			msleep(1);
			continue;
		}
    //音视频同步 视频帧的时间比音频帧的时间快,需要sleep等待音频
    if (synpts < decode->pts)
    {
      mux.unlock();
      msleep(1);
      continue;
    }

		AVPacket *pkt = packs.front();
		packs.pop_front();
		bool re = decode->Send(pkt);
		if (!re)
		{
			mux.unlock();
			msleep(1);
			continue;
		}
		//一次send 多次recv
		while (!isExit)
		{
			AVFrame * frame = decode->Recv();
			if (!frame) break;
			//显示视频
			if (call)
			{
				call->Repaint(frame);
			}

		}
		mux.unlock();
	}
}
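The synchronization code above relies on a few members that were not in the earlier class listings: a public pts in XDecode (set in Recv()), a pts plus getPts() in XAudioThread, and a synpts plus setSynPts() in XVideoThread. A minimal sketch of those additions; the names follow the calls above, everything else is an assumption. All values are in milliseconds, since XDemux::Read() already rescales pts to ms.

//XDecode.h (addition): current decode position
public:
	long long pts = 0;			//set in Recv(): pts = frame->pts;

//XAudioThread.h (addition): the audio clock exposed to the demux thread
public:
	long long getPts() const { return pts; }
protected:
	long long pts = 0;			//updated in run(): decode->pts - ap->GetNoPlayMs()

//XVideoThread.h (addition): the audio clock received from XDemuxThread
public:
	void setSynPts(long long pts) { synpts = pts; }
protected:
	long long synpts = 0;		//compared against decode->pts in run()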

YUV420P row-alignment (stride) issue

How the data is laid out in AVFrame

Difference between yuv420p and yuv420

The only difference is how the data is arranged in memory:
yuv420p (planar): yyyyyyyy uuuu vvvv
yuv420 (packed):  yuv yuv yuv yuv

How yuv420p is stored in AVFrame

YUV420P (planar) data is stored in the data[] array of struct AVFrame:
data[0] ------- the Y plane
data[1] ------- the U plane
data[2] ------- the V plane
The linesize[] array holds the byte width (stride) of the corresponding plane:
linesize[0] ------- stride of the Y plane
linesize[1] ------- stride of the U plane
linesize[2] ------- stride of the V plane

Note:
linesize[0] is not necessarily equal to the picture width. After encoding a 1366*768 picture, linesize[0] came out as 1376. The extra bytes are row padding added for memory alignment: FFmpeg rounds each row up to an alignment suited to fast (SIMD) access, here the next multiple of 32 above 1366.

Therefore the picture width and the Y-plane stride are not necessarily equal!

Fix:

void XVideoWidget::Repaint(AVFrame *frame)
{
  if (!frame)return;
  mux.lock();
  //容错,保证尺寸正确
  if (!datas[0] || width * height == 0 || frame->width != this->width || frame->height != this->height)
  {
    av_frame_free(&frame);
    mux.unlock();
    return;
  }

  //行对齐问题 yuv 512 256 256;width=480,height=288  512和480并不一致
  if (width == frame->linesize[0]) //无需对齐
  {
    memcpy(datas[0], frame->data[0], width*height);
    memcpy(datas[1], frame->data[1], width*height / 4);
    memcpy(datas[2], frame->data[2], width*height / 4);
  }
  else//行对齐问题
  {
    for (int i = 0; i < height; i++) //Y 
      memcpy(datas[0] + width * i, frame->data[0] + frame->linesize[0] * i, width);
    for (int i = 0; i < height / 2; i++) //U
      memcpy(datas[1] + width / 2 * i, frame->data[1] + frame->linesize[1] * i, width/2);
    for (int i = 0; i < height / 2; i++) //V
      memcpy(datas[2] + width / 2 * i, frame->data[2] + frame->linesize[2] * i, width/2);

  }
  mux.unlock();

  av_frame_free(&frame);
  //刷新显示
  update();
}

  Here frame->data[0] (the Y plane) can be viewed as a two-dimensional array of linesize[0] x height bytes. Because of row alignment linesize[0] >= width, so when copying into datas[0] (which holds width x height bytes) only width bytes of each row are copied, row by row, as shown above.

Selecting the video file through the Qt UI

Project resources: https://download.csdn.net/download/LIJIWEI0611/18398033

Extract an XDecodeThread base class, and add Push and Pop functions plus a Clear interface

#pragma once
///解码和显示视频
struct AVPacket;
class XDecode;
#include <list>
#include <mutex>
#include <QThread>
class XDecodeThread:public QThread
{
public:
	XDecodeThread();
	virtual ~XDecodeThread();
	virtual void Push(AVPacket *pkt);

	//清理队列
	virtual void Clear();

	//取出一帧数据,并出栈,如果没有返回NULL
	virtual AVPacket *Pop();

  //设置最大队列数量
  void setMaxList(int maxList = 100) { this->maxList = maxList; }
  //停止线程
  void stopWork() { isExit = true; }

protected:
	std::list <AVPacket *> packs;
	std::mutex mux;
	XDecode *decode = 0;

  //最大队列
  int maxList = 100;
  bool isExit = false;
};

#include "XDecodeThread.h"
#include "XDecode.h"

void XDecodeThread::Clear()
{
	mux.lock();
	if (decode) decode->Clear();
	while (!packs.empty())
	{
		AVPacket *pkt = packs.front();
		XFreePacket(&pkt);	//project helper that frees the AVPacket (presumably wraps av_packet_free)
		packs.pop_front();
	}
	mux.unlock();
}


//取出一帧数据,并出栈,如果没有返回NULL
AVPacket *XDecodeThread::Pop()
{
	mux.lock();
	if (packs.empty())
	{
		mux.unlock();
		return NULL;
	}
	AVPacket *pkt = packs.front();
	packs.pop_front();
	mux.unlock();
	return pkt;
}
void XDecodeThread::Push(AVPacket *pkt)
{
	if (!pkt)return;
	//阻塞
	while (!isExit)
	{
		mux.lock();
		if (packs.size() < maxList)
		{
			packs.push_back(pkt);
			mux.unlock();
			break;
		}
		mux.unlock();
		msleep(1);
	}
}


XDecodeThread::XDecodeThread()
{
	//打开解码器
	if (!decode) decode = new XDecode();
}


XDecodeThread::~XDecodeThread()
{	//等待线程退出
	isExit = true;
	wait();
}

Close: stop the thread and clean up resources

//清理资源,停止线程
void XDecodeThread::Close()
{
  Clear();

  //等待线程退出
  isExit = true;
  wait();
  decode->Close();

  mux.lock();
  delete decode;
  decode = NULL;
  mux.unlock();
}


//停止线程,清理资源
void XAudioThread::Close()
{
	XDecodeThread::Close();
	//amux: XAudioThread's own mutex protecting res/ap (member added in this refactor, not shown above)
	if (res)
	{
		res->Close();
		amux.lock();
		delete res;
		res = NULL;
		amux.unlock();
	}
	if (ap)
	{
		ap->Close();
		amux.lock();
		ap = NULL;
		amux.unlock();
	}
}



//关闭线程清理资源
void XDemuxThread::Close()
{
	isExit = true;
	wait();
	if (vt) vt->Close();
	if (at) at->Close();
	mux.lock();
	delete vt;
	delete at;
	vt = NULL;
	at = NULL;
	mux.unlock();
}

XPlay2::~XPlay2()
{
	dt.Close();
}


Using the diagnostic tools

Locate the leak in run():

Drill down to:

The frame was not being freed after resampling; fix:

//返回重采样后大小,不管成功与否都释放indata空间
int XResample::Resample(AVFrame *indata, unsigned char *d)
{
	if (!indata) return 0;
	if (!d)
	{
		av_frame_free(&indata);
		return 0;
	}
	uint8_t *data[2] = { 0 };
	data[0] = d;
	int re = swr_convert(actx,
		data, indata->nb_samples,		//输出
		(const uint8_t**)indata->data, indata->nb_samples	//输入
	);

  int outSize = re * indata->channels * av_get_bytes_per_sample((AVSampleFormat)outFormat);
  av_frame_free(&indata);
	if (re <= 0)return re;
	
	return outSize;
}

Displaying the progress bar:
