QT、ffmpeg、sdl构建简易的播放器

主要特点:

支持rtsp、http等协议及各种本地的视频mkv、mp3、mp4等等;

支持跳转;

音视频同步;

支持暂停、继续播放;

支持多操作系统;

可用于监控视频播放等简单环境。

总体架构:

分为三个线程(不包括主界面线程):

一个解包线程,负责解包及音视频解码、视频格式转换(音频重采样不在此线程,由音频播放线程完成);

一个音频解码、播放;

一个视频播放。

解码线程源代码:

#ifndef MEDIADECODE_H
#define MEDIADECODE_H

#include <QThread>
#include <QDebug>
#include <QObject>
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>
#include <QQueue>

extern "C"
{
    #include <stdlib.h>
    #include <stdio.h>
    #include <string.h>
    #include <math.h>

    #include "libavutil/avassert.h"
    #include "libavutil/channel_layout.h"
    #include "libavutil/opt.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libswresample/swresample.h"
    #include "libavfilter/avfilter.h"
    #include "libavcodec/avcodec.h"
    #include "libavutil/imgutils.h"
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
};



// Demux/decode worker thread.
// Opens a URL or local file with FFmpeg, decodes video and audio packets,
// converts decoded video frames to m_videoOutFormat via sws_scale, and
// queues the resulting AVFrames for the playback threads to consume.
// Audio resampling is intentionally NOT done here (see AudioPlayer).
class MediaDecode : public QThread
{
    Q_OBJECT
    // Lifecycle states of the decode loop; written/read under m_mutex.
    enum Thread_Status{TH_STOPPED, TH_RUNNING, TH_PAUSED};

    public:
        MediaDecode();
        ~MediaDecode();

    Q_SIGNALS:
        //void sigData(QImage img);
        // Emitted from setUrl()/readStream() on failure with a user-facing message.
        void sigError(QString strErr);
        // Emitted after a successful open: frame size plus video/audio durations in seconds.
        void sigOpened(int width, int height, int64_t videoSeconds, int64_t audioSeconds);
    public:
        // Stores the media location, resets state, and opens the stream
        // immediately on the caller's thread.
        void setUrl(QString url);
        // Pop the next decoded frame, or NULL if the queue is empty.
        // The returned frame stays valid only until the NEXT call
        // (the previous frame is freed on each call).
        AVFrame* nextAudioFrame();
        AVFrame* nextVideoFrame();
        void pause();
        void resume();
        void stop();
        // Stream properties captured in readStream(); valid after sigOpened().
        long long getVideoStartPts();
        AVRational getVideoTimeBase();
        long long getVideoTotalPts();
        long long getAudioStartPts();
        AVRational getAudioTimeBase();
        long long getAudioTotalPts();
        int getVideoFrameWidth();
        int getVideoFrameHeight();
        bool hasVideo();
        bool hasAudio();
        AVChannelLayout getAudioChannelLayout();
        AVSampleFormat getAudioSampleFormat();
        int getAudioSampleRate();
        // Request an asynchronous seek to the given second; the decode
        // loop performs the actual av_seek_frame().
        void seek(int64_t iSecond);
        //AVPixelFormat getVideoFormat();

    protected:
        // QThread entry point: runs decodeStream() until stopped or EOF.
        void run();

    protected:
        // Frees all FFmpeg resources and restores initial member values.
        void reset();
        // Opens m_url and sets up decoders/converter; emits sigOpened/sigError.
        bool readStream();
        // The main packet read/decode/convert loop.
        void decodeStream();
        // Drain the frame queues, freeing every queued frame.
        void emptyVideoQueue();
        void emptyAudioQueue();

    private:
        QString m_url;
        Thread_Status m_status;

    private:
        QMutex m_mutex;                      // guards status, seek target and both queues
        int m_nMaxFrames;                    // back-pressure limit per queue
        QQueue<AVFrame*> m_qVideoFrames;     // converted frames awaiting display
        QQueue<AVFrame*> m_qAudioFrames;     // decoded frames awaiting resample/playback

        AVFormatContext* m_pFormatCtx = NULL;
        AVCodecContext* m_pVideoCodecCtx = NULL;
        AVCodecContext* m_pAudioCodecCtx = NULL;
        AVPacket* m_avpacket = NULL;
        AVFrame *m_frame = NULL;             // scratch frame for video decoding

        SwsContext *m_pImageConvert = NULL;  // pixel-format converter (decoded -> m_videoOutFormat)

        int m_videoIndex = -1;               // stream index, -1 = no such stream
        int m_audioIndex = -1;
        int m_width = 0;
        int m_height = 0;

        long long m_videoStartPts = 0;
        AVRational m_videoTimeBase;
        int64_t m_videoTotalPts = 0;
        int64_t m_videoSeconds = 0;

        long long m_audioStartPts = 0;
        AVRational m_audioTimeBase;
        int64_t m_audioTotalPts = 0;
        int64_t m_audioSeconds = 0;

        AVPixelFormat m_videoOutFormat;      // target pixel format for queued video frames
        AVPixelFormat m_videoFormat;         // source (decoder) pixel format

        AVChannelLayout m_audioLayout;       // cached from the audio codec context
        AVSampleFormat  m_audioSampleFormat;
        int m_audioSampleRate = 0;

        // Frame handed out on the previous next*Frame() call; freed on the
        // next call so the consumer never has to free frames itself.
        AVFrame* m_pPrevVideoFrame = NULL;
        AVFrame* m_pPrevAudioFrame = NULL;

        int64_t m_iSeekTo;                   // pending seek target in seconds, -1 = none
};


#endif // MEDIADECODE_H

#include "mediadecode.h"
#include <QDebug>
#include <QTime>
#include <QApplication>


// Construct the decoder in its idle state and initialize FFmpeg's
// networking layer. No media is opened until setUrl() is called.
MediaDecode::MediaDecode()
    : QThread()
{
    m_status = TH_STOPPED;

    // Back-pressure: the decode loop stalls once either queue holds this many frames.
    m_nMaxFrames = 16;
    m_qAudioFrames.reserve(m_nMaxFrames);
    m_qVideoFrames.reserve(m_nMaxFrames);

    // All queued video frames are converted to this pixel format.
    m_videoOutFormat = AV_PIX_FMT_YUV420P/*AV_PIX_FMT_RGB32*/;

    m_videoIndex = -1;
    m_audioIndex = -1;
    m_width = 0;
    m_height = 0;

    // Placeholder 1/25 time bases until a real stream is opened.
    m_videoStartPts = 0;
    m_videoTotalPts = 0;
    m_videoTimeBase = AVRational{1, 25};

    m_audioStartPts = 0;
    m_audioTotalPts = 0;
    m_audioTimeBase = AVRational{1, 25};

    m_iSeekTo = -1;

    // Enable network protocols (rtsp/http) and silence FFmpeg's logging.
    avformat_network_init();
    av_log_set_level(AV_LOG_QUIET);
}

// Releases every FFmpeg resource held by this decoder.
// NOTE(review): does not stop/join the thread itself — callers are
// expected to have called stop() and waited before destruction.
MediaDecode::~MediaDecode()
{
    reset();
}

void MediaDecode::reset()
{
    m_videoIndex = -1;
    m_audioIndex = -1;

    m_iSeekTo = -1;

    m_width = 0;
    m_height = 0;

    m_videoStartPts = 0;
    m_videoTotalPts = 0;
    m_videoTimeBase.den = 25;
    m_videoTimeBase.num = 1;

    m_audioStartPts = 0;
    m_audioTotalPts = 0;
    m_audioTimeBase.den = 25;
    m_audioTimeBase.num = 1;

    if (m_pFormatCtx != NULL)
    {
        avformat_close_input(&m_pFormatCtx);
        avformat_free_context(m_pFormatCtx);
        m_pFormatCtx = NULL;
    }
    if (m_pVideoCodecCtx != NULL)
    {
        avcodec_close(m_pVideoCodecCtx);
        avcodec_free_context(&m_pVideoCodecCtx);
        m_pVideoCodecCtx = NULL;
    }
    if (m_pAudioCodecCtx != NULL)
    {
        avcodec_close(m_pAudioCodecCtx);
        avcodec_free_context(&m_pAudioCodecCtx);
        m_pAudioCodecCtx = NULL;
    }
    if(m_pImageConvert != NULL)
    {
        sws_freeContext(m_pImageConvert);
        m_pImageConvert = NULL;
    }
    if (m_avpacket != NULL)
    {
        av_packet_free(&m_avpacket);
        delete m_avpacket;
        m_avpacket = NULL;
    }
    if (m_frame != NULL)
    {
        av_frame_free(&m_frame);
        m_frame = NULL;
    }
    if (m_pPrevVideoFrame != NULL)
    {
        av_free(m_pPrevVideoFrame->data[0]);
        av_frame_free(&m_pPrevVideoFrame);
        m_pPrevVideoFrame = NULL;
    }

    if (m_pPrevAudioFrame != NULL)
    {
        av_frame_free(&m_pPrevAudioFrame);
        m_pPrevAudioFrame = NULL;
    }

    m_qAudioFrames.clear();
    m_qVideoFrames.clear();
}

bool MediaDecode::hasVideo()
{
    m_mutex.lock();
    bool bHas = m_videoIndex >= 0;
    m_mutex.unlock();

    return bHas;
}

bool MediaDecode::hasAudio()
{
    m_mutex.lock();
    bool bHas = m_audioIndex >= 0;
    m_mutex.unlock();

    return bHas;
}


// Record an asynchronous seek target in seconds; the decode loop picks
// it up and performs the actual av_seek_frame(). Negative targets are ignored.
void MediaDecode::seek(int64_t iSecond)
{
    if (iSecond >= 0)
    {
        QMutexLocker locker(&m_mutex);
        m_iSeekTo = iSecond;
    }
}

// Channel layout of the audio stream, captured in readStream().
// Returns the cached copy instead of dereferencing m_pAudioCodecCtx,
// which is NULL before a stream is opened (the old code crashed there)
// and is freed by reset() on the decode thread.
AVChannelLayout MediaDecode::getAudioChannelLayout()
{
    return m_audioLayout;
}


// ---- Plain accessors for stream properties captured in readStream() ----
// NOTE(review): these read members without holding m_mutex; callers are
// expected to use them only after sigOpened() has fired — confirm.

// FFmpeg sample format of the decoded audio.
AVSampleFormat MediaDecode::getAudioSampleFormat()
{
    return m_audioSampleFormat;
}


// Sample rate (Hz) of the audio stream.
int MediaDecode::getAudioSampleRate()
{
    return m_audioSampleRate;
}


// First pts of the video stream (stream time base units).
long long MediaDecode::getVideoStartPts()
{
    return m_videoStartPts;
}


// Total duration of the video stream in stream time base units.
long long MediaDecode::getVideoTotalPts()
{
    return m_videoTotalPts;
}


// Time base of the video stream.
AVRational MediaDecode::getVideoTimeBase()
{
    return m_videoTimeBase;
}


// First pts of the audio stream (stream time base units).
long long MediaDecode::getAudioStartPts()
{
    return m_audioStartPts;
}


// Total duration of the audio stream in stream time base units.
long long MediaDecode::getAudioTotalPts()
{
    return m_audioTotalPts;
}


// Time base of the audio stream.
AVRational MediaDecode::getAudioTimeBase()
{
    return m_audioTimeBase;
}

// Video frame width in pixels (0 until a stream is opened).
int MediaDecode::getVideoFrameWidth()
{
    return m_width;
}


// Video frame height in pixels (0 until a stream is opened).
int MediaDecode::getVideoFrameHeight()
{
    return m_height;
}


void MediaDecode::pause()
{
    QMutexLocker locker(&m_mutex);
    m_status = TH_PAUSED;
}


void MediaDecode::resume()
{
    QMutexLocker locker(&m_mutex);
    m_status = TH_RUNNING;
}


void MediaDecode::stop()
{
    QMutexLocker locker(&m_mutex);
    m_status = TH_STOPPED;
}

// Hand out the next converted video frame, or NULL if none is queued.
// Ownership model: the frame returned by the PREVIOUS call is freed here,
// so callers must be done with it before calling again and must not free
// frames themselves.
AVFrame* MediaDecode::nextVideoFrame()
{
    AVFrame* pFrame=NULL;

    if (m_pPrevVideoFrame != NULL)
    {
        //for(int i=0;i<1/*AV_NUM_DATA_POINTERS*/;i++)
        {
            // data[0] is the single av_malloc()ed buffer backing all planes
            // (filled via av_image_fill_arrays in decodeStream).
            av_free(m_pPrevVideoFrame->data[0]);
        }

        av_frame_free(&m_pPrevVideoFrame);
        m_pPrevVideoFrame = NULL;
    }

    // The decode thread enqueues concurrently, so dequeue under the mutex.
    m_mutex.lock();
    if(m_qVideoFrames.size() > 0)
    {
        pFrame = m_qVideoFrames.dequeue();
    }

    m_mutex.unlock();

    // Remember the frame so the next call can release it.
    m_pPrevVideoFrame = pFrame;
    return pFrame;
}

// Hand out the next decoded audio frame, or NULL if none is queued.
// The frame returned by the PREVIOUS call is released here, so the
// caller must be finished with it before calling again.
AVFrame* MediaDecode::nextAudioFrame()
{
    if (m_pPrevAudioFrame != NULL)
    {
        av_frame_free(&m_pPrevAudioFrame);
        m_pPrevAudioFrame = NULL;
    }

    AVFrame* frame = NULL;
    {
        // The decode thread enqueues concurrently; dequeue under the mutex.
        QMutexLocker locker(&m_mutex);
        if (!m_qAudioFrames.isEmpty())
        {
            frame = m_qAudioFrames.dequeue();
        }
    }

    // Remember the frame so the next call can release it.
    m_pPrevAudioFrame = frame;
    return frame;
}


void MediaDecode::emptyVideoQueue()
{
    AVFrame* pFrame = NULL;
    while(!m_qVideoFrames.isEmpty())
    {
        pFrame=m_qVideoFrames.dequeue();
        av_free(pFrame->data[0]);
        av_frame_free(&pFrame);
    }
}


void MediaDecode::emptyAudioQueue()
{
    AVFrame* pFrame = NULL;
    while(!m_qAudioFrames.isEmpty())
    {
        pFrame=m_qAudioFrames.dequeue();
        av_frame_free(&pFrame);
    }
}

// Remember the media location, drop any previous session's state, and
// open the new stream immediately (emits sigOpened or sigError).
// NOTE(review): this runs on the CALLER's thread, not the decode thread;
// a slow network open blocks the caller — confirm this is intended.
void MediaDecode::setUrl(QString url)
{
    m_url = url;
    reset();
    readStream();
}


bool MediaDecode::readStream()
{
    m_pFormatCtx = avformat_alloc_context();
    AVDictionary *avdic=NULL;
    m_iSeekTo = -1;

    av_dict_set(&avdic, "buffer_size", "1024000", 0); //设置缓存大小,1080p可将值调大
    av_dict_set(&avdic, "rtsp_transport", "udp", 0);

    //打开输入文件
    if (avformat_open_input(&m_pFormatCtx, m_url.toStdString().c_str(), NULL, &avdic) != 0)
    {
        emit sigError("打开文件/流失败!");
        return false;
    }
    if (avformat_find_stream_info(m_pFormatCtx, NULL) < 0)
    {
        reset();
        emit sigError("找不到视频/音频流!");
        return false;
    }

    m_videoIndex = -1;
    for (int i = 0; i < (int)m_pFormatCtx->nb_streams; i++)
    {
        //查找视频
        if (m_pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_videoIndex = i;
            break;
        }
    }
    m_audioIndex = -1;
    for (int i = 0; i < (int)m_pFormatCtx->nb_streams; i++)
    {
        //查找音频
        if (m_pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            m_audioIndex = i;
            break;
        }
    }
    if (m_videoIndex == -1 && m_audioIndex == -1)
    {
        reset();
        emit sigError("即无视频流也无音频流!");
        return false;
    }

    m_width = 0;
    m_height = 0;
    m_videoSeconds = 0;
    m_audioSeconds = 0;

    if(m_videoIndex >=0)
    {
        m_pVideoCodecCtx = avcodec_alloc_context3(NULL);
        if (m_pVideoCodecCtx == NULL)
        {
            reset();
            emit sigError("无法创建视频解码器!");
            return false;
        }
        if(avcodec_parameters_to_context(m_pVideoCodecCtx, m_pFormatCtx->streams[m_videoIndex]->codecpar)<0)
        {
            reset();
            emit sigError("初始化视频解码器失败!");
            return false;
        }

        //指向AVCodec的指针,查找解码器
        AVCodec *pVideoCodec = (AVCodec*)avcodec_find_decoder(m_pFormatCtx->streams[m_videoIndex]->codecpar->codec_id);
        if (pVideoCodec == NULL)
        {
            reset();
            emit sigError("查找视频解码器失败!");
            return false;
        }
        //打开解码器
        if (avcodec_open2(m_pVideoCodecCtx, pVideoCodec, NULL) < 0)
        {
            reset();
            emit sigError("打开视频解码器失败!");
            return false;
        }

        m_width = m_pFormatCtx->streams[m_videoIndex]->codecpar->width;
        m_height = m_pFormatCtx->streams[m_videoIndex]->codecpar->height;

        m_videoFormat = m_pVideoCodecCtx->pix_fmt;
        m_pImageConvert = sws_getContext(m_width, m_height, m_pVideoCodecCtx->pix_fmt, m_width, m_height, m_videoOutFormat/*AV_PIX_FMT_YUV420PAV_PIX_FMT_RGB32*/, SWS_BICUBIC, NULL, NULL, NULL);
        if(m_pImageConvert == NULL)
        {
            reset();
            emit sigError("视频转换器创建失败!");
            return false;
        }
        m_videoStartPts = m_pFormatCtx->streams[m_videoIndex]->start_time;
        m_videoTimeBase = m_pFormatCtx->streams[m_videoIndex]->time_base;
        m_videoTotalPts = m_pFormatCtx->streams[m_videoIndex]->duration;// * av_q2d(m_videoTimeBase);
        m_videoSeconds = m_pFormatCtx->streams[m_videoIndex]->duration * m_videoTimeBase.num / m_videoTimeBase.den;
    }

    if(m_audioIndex >=0)
    {
        m_pAudioCodecCtx = avcodec_alloc_context3(NULL);
        if (m_pAudioCodecCtx == NULL)
        {
            reset();
            emit sigError("无法创建音频解码器!");
            return false;
        }
        if(avcodec_parameters_to_context(m_pAudioCodecCtx, m_pFormatCtx->streams[m_audioIndex]->codecpar)<0)
        {
            reset();
            emit sigError("初始化音频解码器失败!");
            return false;
        }

        //指向AVCodec的指针,查找解码器
        AVCodec *pAudioCodec = (AVCodec*)avcodec_find_decoder(m_pFormatCtx->streams[m_audioIndex]->codecpar->codec_id);
        if (pAudioCodec == NULL)
        {
            reset();
            emit sigError("查找音频解码器失败!");
            return false;
        }
        //打开解码器
        if (avcodec_open2(m_pAudioCodecCtx, pAudioCodec, NULL) < 0)
        {
            reset();
            emit sigError("打开音频解码器失败!");
            return false;
        }
        m_audioLayout = m_pAudioCodecCtx->ch_layout;
        m_audioSampleFormat = m_pAudioCodecCtx->sample_fmt;
        m_audioSampleRate = m_pAudioCodecCtx->sample_rate;
        m_audioStartPts = m_pFormatCtx->streams[m_audioIndex]->start_time;
        m_audioTimeBase = m_pFormatCtx->streams[m_audioIndex]->time_base;
        m_audioTotalPts = m_pFormatCtx->streams[m_audioIndex]->duration;// * av_q2d(m_audioTimeBase);
        m_audioSeconds = m_pFormatCtx->streams[m_audioIndex]->duration * m_audioTimeBase.num / m_audioTimeBase.den;
    }

    emit sigOpened(m_width, m_height, m_videoSeconds, m_audioSeconds);

    return true;
}

// QThread entry point: mark the loop as running and decode until
// stopped, EOF, or a read error.
void MediaDecode::run()
{
    //qDebug() << "MediaDecode::run Thread id is:" << QThread::currentThreadId();
    m_status = TH_RUNNING;
    decodeStream();
}


void MediaDecode::decodeStream()
{
    m_avpacket = av_packet_alloc();
    m_frame = av_frame_alloc();

    bool bFull = false;
    Thread_Status status;
    while (true)
    {
        msleep(5);
        m_mutex.lock();
        status = m_status;
        bFull = (m_qVideoFrames.size() >= m_nMaxFrames) || (m_qAudioFrames.size() >= m_nMaxFrames);
        m_mutex.unlock();        

        if(status == TH_STOPPED)
        {
            break;
        }
        if(status == TH_PAUSED || bFull)
        {
            msleep(200);
            continue;
        }
        if(m_avpacket == NULL)
        {
            m_avpacket = av_packet_alloc();
        }

        if(m_iSeekTo >= 0)
        {
            av_seek_frame(m_pFormatCtx,-1,m_iSeekTo*AV_TIME_BASE, AVSEEK_FLAG_BACKWARD);
            m_iSeekTo = -1;
            av_packet_free(&m_avpacket);
            m_avpacket = NULL;
            continue;
        }
        else
        {
            if(av_read_frame(m_pFormatCtx, m_avpacket) < 0)
            {
                break;
            }
        }

        if (m_avpacket->stream_index == m_videoIndex)
        {
            if (avcodec_send_packet(m_pVideoCodecCtx, m_avpacket) >= 0)
            {
                if(m_frame == NULL)
                {
                    m_frame = av_frame_alloc();
                }
                int ret = avcodec_receive_frame(m_pVideoCodecCtx, m_frame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                {
                    continue;
                }
                else if (ret < 0)
                {
                    continue;
                }

                AVFrame *pFrameRGB = av_frame_alloc();
                int nBGRFrameSize = av_image_get_buffer_size(m_videoOutFormat/*AV_PIX_FMT_YUV420PAV_PIX_FMT_RGB32*/, m_width, m_height, 1);
                uint8_t* rgbBuffer = NULL;

                rgbBuffer = (uint8_t*)av_malloc(nBGRFrameSize);
                av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, rgbBuffer, m_videoOutFormat/*AV_PIX_FMT_YUV420PAV_PIX_FMT_RGB32*/, m_width, m_height, 1);

                sws_scale(m_pImageConvert, (uint8_t const *const *)m_frame->data, m_frame->linesize, 0, m_height, pFrameRGB->data, pFrameRGB->linesize);

                pFrameRGB->pts = m_frame->pts;
                m_qVideoFrames.enqueue(pFrameRGB);

                av_frame_free(&m_frame);
                m_frame = NULL;
            }
        }

        if(m_avpacket->stream_index == m_audioIndex)
        {
            AVFrame *frameAudio = av_frame_alloc();

            int ret = avcodec_send_packet(m_pAudioCodecCtx, m_avpacket);
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
            {
                continue;
            }
            ret = avcodec_receive_frame(m_pAudioCodecCtx, frameAudio);
            if (ret < 0 && ret != AVERROR_EOF)
            {
                continue;
            }

            m_qAudioFrames.enqueue(frameAudio);
        }
        av_packet_free(&m_avpacket);
        m_avpacket = NULL;
    }
    reset();
    m_status = TH_STOPPED;
}

音频播放源代码:

#ifndef AUDIOPLAYER_H
#define AUDIOPLAYER_H


#include "mediadecode.h"
#include <QThread>
#include <QObject>
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>
#include <QQueue>

extern "C"
{
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavfilter/avfilter.h"
#include "libavcodec/avcodec.h"
#include "libavutil/imgutils.h"
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include "sdl/include/SDL.h"
#undef main
};




// Audio playback thread.
// Pulls decoded audio frames from a MediaDecode instance, resamples them
// to whatever format the SDL audio device actually accepted, and pushes
// PCM to the device with SDL_QueueAudio (push model, no callback).
// Also serves as the master playback clock via getCurPlayingTime().
// NOTE(review): no Q_OBJECT macro — fine as long as this class never
// declares signals/slots.
class AudioPlayer : public QThread
{
    // Lifecycle states of the playback loop; guarded by m_mutex.
    enum Thread_Status{TH_STOPPED, TH_RUNNING, TH_PAUSED};
public:
    explicit AudioPlayer(QObject *parent = nullptr);
    // Attach the decoder to pull frames from; must be set before start().
    void setDecodeThread(MediaDecode* p);
    // Playback stops once a frame's pts exceeds this total (stream units).
    void setTotalPts(int64_t nTotal);
    // Current playback position in milliseconds (audio master clock).
    double getCurPlayingTime();
    // First pts of the audio stream, used as the clock's zero point.
    void setStartPts(int64_t iPts);
    void pause();
    void resume();
    void stop();
    // Mute queues silence instead of converted samples (timing preserved).
    void setMute(bool bMute);
    // Drop all PCM already queued to the device (called on seek).
    void seek();
protected:
    // QThread entry point: the resample/queue playback loop.
    void run();

private:
    MediaDecode* m_pDecodeThread;   // not owned
    QMutex m_mutex;                 // guards m_status and m_iCurPts
    int64_t m_nTotalPts;            // stop threshold, -1/0 = unlimited
    AVRational m_timebase;          // audio stream time base
    SDL_AudioSpec m_spec;           // spec actually granted by the device
    SDL_AudioDeviceID m_audioDeviceID;
    Thread_Status m_status;
    bool m_bMute;
    int64_t m_iStartPts;            // clock zero point
    int64_t m_iCurPts;              // pts of the last frame queued to SDL
    int64_t m_nDeletedLen;
    int m_nBytes;                   // bytes per sample of the device format
};

#endif // AUDIOPLAYER_H


#include "audioplayer.h"

// Construct the player in its idle state; no SDL device is opened until
// run() starts. Initializer list order follows the declaration order.
AudioPlayer::AudioPlayer(QObject *parent)
    : QThread(parent)
    , m_pDecodeThread(NULL)
    , m_nTotalPts(-1)
    , m_audioDeviceID(0)
    , m_status(TH_STOPPED)
    , m_bMute(false)
    , m_iStartPts(0)
    , m_iCurPts(0)
    , m_nDeletedLen(0)
    , m_nBytes(1)
{
    // Plain SDL struct; zero it explicitly.
    SDL_zero(m_spec);
}

// Attach the decoder that supplies audio frames (not owned; must outlive us).
void AudioPlayer::setDecodeThread(MediaDecode* p)
{
    m_pDecodeThread = p;
}

// Set the pts threshold at which playback ends (stream time base units).
void AudioPlayer::setTotalPts(int64_t nTotal)
{
    m_nTotalPts = nTotal;
}

// Set the stream's first pts; getCurPlayingTime() measures from here.
void AudioPlayer:: setStartPts(int64_t iPts)
{
    m_iStartPts = iPts;
}

// Mute/unmute; muted playback queues silence so timing is unaffected.
void AudioPlayer::setMute(bool bMute)
{
    m_bMute = bMute;
}


// Discard all PCM already queued to the device so playback can jump
// immediately after a seek instead of draining stale audio.
void AudioPlayer::seek()
{
    SDL_ClearQueuedAudio(m_audioDeviceID);
}


// Pause the playback loop and halt the hardware device.
void AudioPlayer::pause()
{
    {
        QMutexLocker locker(&m_mutex);
        m_status = TH_PAUSED;
    }
    SDL_PauseAudioDevice(m_audioDeviceID, 1);
}


// Resume the playback loop and restart the hardware device.
void AudioPlayer::resume()
{
    {
        QMutexLocker locker(&m_mutex);
        m_status = TH_RUNNING;
    }
    SDL_PauseAudioDevice(m_audioDeviceID, 0);
}


// Ask the playback loop to exit; run() closes the device on the way out.
void AudioPlayer::stop()
{
    m_mutex.lock();
    m_status = TH_STOPPED;
    m_mutex.unlock();
}


// Current playback position in milliseconds: the timestamp of the last
// frame queued to SDL, minus the playing time represented by the bytes
// still waiting in the device's queue (not yet heard).
// NOTE(review): if called before run() has opened the device,
// m_spec.channels*m_spec.freq is 0 and the division yields inf/NaN — confirm
// callers only use this while audio is running.
double AudioPlayer::getCurPlayingTime()
{
    double dLeft = 0.0, ret = 0.0;
    int64_t curPts = 0;
    m_mutex.lock();
    curPts = m_iCurPts;
    dLeft = (double)SDL_GetQueuedAudioSize(m_audioDeviceID);
    m_mutex.unlock();

    // ms(pts since start) - ms(bytes still queued / bytes-per-second).
    ret = 1000.0 *double(curPts-m_iStartPts) * double(m_timebase.num) / double(m_timebase.den) - 1000.0 * dLeft /double(m_spec.channels*m_spec.freq)/double(m_nBytes);
    return ret;
}

// Audio playback loop (runs on this QThread). Opens an SDL audio device,
// builds a resampler from the decoder's format to whatever the device
// actually granted, then pulls frames and pushes converted PCM via
// SDL_QueueAudio() until stopped or past m_nTotalPts.
void AudioPlayer::run()
{
    if(m_pDecodeThread == NULL)
    {
        return;
    }

    m_bMute = false;
    m_timebase = m_pDecodeThread->getAudioTimeBase();

    AVSampleFormat outAudiofmt = AV_SAMPLE_FMT_NONE;

    SDL_AudioSpec wanted_spec;
    SDL_zero(wanted_spec);
    SDL_zero(m_spec);

    uint8_t *pcm_buffer = NULL;
    int pcm_buffer_len = 0;
    wanted_spec.freq = 48000;
    wanted_spec.format = AUDIO_F32LSB;
    wanted_spec.channels = 2;
    wanted_spec.silence = 0;
    wanted_spec.samples = 480;
    wanted_spec.callback = NULL;  // push model via SDL_QueueAudio, no callback
    wanted_spec.userdata = NULL;

    // The `obtained` argument (&m_spec) must NOT be NULL: the device may
    // not honour wanted_spec, and we must resample to what the hardware
    // actually accepted or no sound comes out. Valid device IDs start at 2.
    if((m_audioDeviceID = SDL_OpenAudioDevice(NULL,0,&wanted_spec,&m_spec,SDL_AUDIO_ALLOW_ANY_CHANGE))<2)
    {
        return;
    }

    // Map the SDL format the device granted to the matching FFmpeg sample
    // format, and record bytes-per-sample for the clock computation.
    m_nBytes = 1;
    switch (m_spec.format)
    {
    case AUDIO_U8:
    case AUDIO_S8:
        outAudiofmt = AV_SAMPLE_FMT_U8;
        break;
    case AUDIO_U16LSB:
    case AUDIO_S16LSB:
    case AUDIO_U16MSB:
    case AUDIO_S16MSB:
        outAudiofmt = AV_SAMPLE_FMT_S16;
        m_nBytes = 2;
        break;
    case AUDIO_S32LSB:
    case AUDIO_S32MSB:
        outAudiofmt = AV_SAMPLE_FMT_S32;
        m_nBytes = 4;
        break;
    case AUDIO_F32LSB:
    case AUDIO_F32MSB:
        outAudiofmt = AV_SAMPLE_FMT_FLT;
        m_nBytes = 4;
        break;
    default:
        break;
    }

    // Scratch PCM buffer, deliberately much larger than one decoded frame.
    pcm_buffer_len = m_spec.channels*m_spec.freq;
    pcm_buffer = (uint8_t *)av_mallocz(pcm_buffer_len);

    SwrContext *pSoundConvert = swr_alloc();
    {
        AVChannelLayout outChannelLayout = AV_CHANNEL_LAYOUT_STEREO;
        AVChannelLayout inChannelLayout = m_pDecodeThread->getAudioChannelLayout();
        swr_alloc_set_opts2(&pSoundConvert, &outChannelLayout, outAudiofmt, m_spec.freq, &inChannelLayout, m_pDecodeThread->getAudioSampleFormat(), m_pDecodeThread->getAudioSampleRate(), 0, NULL);
        if (!pSoundConvert || swr_init(pSoundConvert) < 0)
        {
            // Release what was acquired above (the old code leaked the
            // buffer and left the device open on this path).
            swr_free(&pSoundConvert);
            av_free(pcm_buffer);
            SDL_CloseAudioDevice(m_audioDeviceID);
            return;
        }
    }

    SDL_PauseAudioDevice(m_audioDeviceID,0);

    // swr_convert()'s output size is in samples PER CHANNEL, not bytes
    // (the old code passed the byte length).
    int maxOutSamples = pcm_buffer_len / (m_spec.channels * m_nBytes);

    double dAudioErr = 0.0, dblAudioSamples = 0.0;
    int nAudioNeedSamples = 0;

    AVFrame* pAudioFrame = NULL;
    m_status = TH_RUNNING;

    Thread_Status status;

    while(1)
    {
        m_mutex.lock();
        status = m_status;
        m_mutex.unlock();

        if(status == TH_STOPPED)
        {
            break;
        }
        if(status == TH_PAUSED )
        {
            msleep(200);
            continue;
        }

        msleep(20);
        // Keep the device queue below ~1.9 MB before pulling more frames.
        if(SDL_GetQueuedAudioSize(m_audioDeviceID) < 1920000)
        {
            pAudioFrame = m_pDecodeThread->nextAudioFrame();
            if(pAudioFrame != NULL)
            {
                if(m_nTotalPts > 0 && pAudioFrame->pts > m_nTotalPts)
                {
                    break; // played past the stream's declared duration
                }
                SDL_memset(pcm_buffer, 0, pcm_buffer_len);
                if(!m_bMute)
                {
                    // Mute works by queueing the zeroed buffer instead.
                    swr_convert(pSoundConvert,&pcm_buffer,maxOutSamples,(const uint8_t **)(pAudioFrame->data),pAudioFrame->nb_samples);
                }
                // The frame's duration is authoritative (it can disagree
                // with nb_samples/sample_rate); carry the fractional sample
                // remainder forward so rounding never accumulates drift.
                dblAudioSamples = double(m_timebase.num)*double(m_spec.freq)*double(pAudioFrame->duration)/double(m_timebase.den);
                nAudioNeedSamples = int(dblAudioSamples);
                dAudioErr += (dblAudioSamples - nAudioNeedSamples);
                if(dAudioErr >= 1.0)
                {
                    nAudioNeedSamples++;
                    dAudioErr--;
                }
                int out_buffer_size = av_samples_get_buffer_size(NULL, m_spec.channels, nAudioNeedSamples, outAudiofmt, 1);
                SDL_QueueAudio(m_audioDeviceID,pcm_buffer,out_buffer_size);
                // getCurPlayingTime() reads m_iCurPts under this mutex
                // from another thread (the old code wrote it unlocked).
                m_mutex.lock();
                m_iCurPts = pAudioFrame->pts;
                m_mutex.unlock();
            }
        }
        else
        {
            msleep(200);
        }
    }

    // Release everything acquired by this run (the SwrContext leaked before).
    swr_free(&pSoundConvert);
    if(pcm_buffer != NULL)
    {
        av_free(pcm_buffer);
    }
    SDL_ClearQueuedAudio(m_audioDeviceID);
    SDL_CloseAudioDevice(m_audioDeviceID);
    m_status = TH_STOPPED;
}

视频播放源代码:

#ifndef MEDIAPLAYER_H
#define MEDIAPLAYER_H


extern "C"
{
    #include <stdlib.h>
    #include <stdio.h>
    #include <string.h>
    #include <math.h>
    #include "libavutil/avassert.h"
    #include "libavutil/channel_layout.h"
    #include "libavutil/opt.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libswresample/swresample.h"
    #include "libavfilter/avfilter.h"
    #include "libavcodec/avcodec.h"
    #include "libavutil/imgutils.h"
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include "sdl/include/SDL.h"
    #undef main
};


#include <QThread>
#include <QObject>
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>
#include <QQueue>
#include "mediadecode.h"
#include "audioplayer.h"
#include <QApplication>




// Top-level player facade and video render thread.
// Owns the decode thread and the audio thread, renders video frames into
// an SDL window created from a Qt widget's native handle, and exposes the
// open/pause/resume/stop/seek control surface for the UI.
class MediaPlayer: public QThread
{
    // Lifecycle states of the render loop; guarded by m_mutex.
    enum Thread_Status{TH_STOPPED, TH_RUNNING, TH_PAUSED};
    Q_OBJECT
public:
    // wid: native window handle of the Qt widget SDL should draw into.
    MediaPlayer(WId wid);
    virtual ~MediaPlayer();

Q_SIGNALS:
    // Forwarded from the decode thread for the UI.
    void error(QString strErr);
    void opened(int w, int h, int64_t videoSeconds, int64_t audioSeconds);
    //void render_SDL(uint8_t* imgY, int linesY/*,uint8_t* imgU, int linesU,uint8_t* imgV, int linesV*/);
    //void render_SDL();

public:
    // Open a file/URL and start decode + render (+ audio) threads.
    bool open(QString strFilePath);
    void pause();
    void resume();
    void stop();
    // Jump to the given second (asynchronous).
    void seek(int64_t iSecond);
    void setMute(bool bMute);
    QString getFileName();
    QImage capture();
    // Current playback position in ms (audio clock preferred).
    double getCurTimeStamp();

private slots:
    // Relay decode-thread signals to this class's own signals.
    void openFailed(QString strErr);
    void openSucceed(int w, int h, int64_t videoSeconds, int64_t audioSeconds);

protected:
    // QThread entry point: the video render / A-V sync loop.
    void run();

private:
    QMutex m_mutex;                 // guards m_status and m_curTimeStamp
    QString m_strFileName;
    WId m_wid=-1;                   // native handle of the target widget

    SDL_Window *m_sdlWindow = NULL;
    SDL_Renderer *m_sdlRenderer = NULL;
    SDL_Texture *m_sdlTexture = NULL;

    MediaDecode *decodeThread;      // owned; created in the constructor
    AudioPlayer *audioThread;       // owned; created in the constructor

    Thread_Status m_status;
    double m_curTimeStamp;          // last rendered video timestamp in ms
};

#endif // MEDIAPLAYER_H


#include "mediaplayer.h"

#include <QImage>
#include <QTime>
#include <QApplication>
#include "mediadecode.h"



// Initialize SDL, bind an SDL window/renderer to the Qt widget's native
// handle, and create (but do not start) the worker threads.
// NOTE(review): SDL_CreateWindowFrom/SDL_CreateRenderer return values are
// not checked here — a failure would surface later as a crash in run().
MediaPlayer::MediaPlayer(WId wid)
{
    SDL_Init(SDL_INIT_EVERYTHING);

    m_status = TH_STOPPED;

    m_wid = wid;
    m_curTimeStamp = 0.0;

    // Render into the existing Qt widget instead of a new SDL window.
    m_sdlWindow = SDL_CreateWindowFrom((const void*)wid);
    m_sdlRenderer=SDL_CreateRenderer(m_sdlWindow,-1,SDL_RENDERER_TARGETTEXTURE);
    audioThread = new AudioPlayer();

   // m_videoOutFormat = AV_PIX_FMT_YUV420P/*AV_PIX_FMT_RGB32*/;

    decodeThread = new MediaDecode();

    // Relay the decoder's open result to the UI.
    connect(decodeThread, &MediaDecode::sigError, this, &MediaPlayer::openFailed);
    connect(decodeThread, &MediaDecode::sigOpened, this, &MediaPlayer::openSucceed);
}


// Shut down the worker threads, then this object's own render thread,
// then release the SDL objects before SDL_Quit(). terminate() is a last
// resort after a 3-second grace period.
MediaPlayer::~MediaPlayer()
{
    decodeThread->stop();
    decodeThread->quit();
    if(!decodeThread->wait(3000))
    {
        decodeThread->terminate();
        decodeThread->wait();
    }

    audioThread->stop();
    audioThread->quit();
    if(!audioThread->wait(3000))
    {
        audioThread->terminate();
        audioThread->wait();
    }

    // Stop this object's own render loop before destroying the SDL
    // objects it uses (the old code could call SDL_Quit() under a live run()).
    {
        QMutexLocker locker(&m_mutex);
        m_status = TH_STOPPED;
    }
    quit();
    if(!wait(3000))
    {
        terminate();
        wait();
    }

    // The worker threads were created without a Qt parent, so they must
    // be deleted here (they leaked before).
    delete decodeThread;
    delete audioThread;

    // Destroy SDL objects in reverse order of creation, then shut SDL down.
    if (m_sdlTexture != NULL)
    {
        SDL_DestroyTexture(m_sdlTexture);
    }
    if (m_sdlRenderer != NULL)
    {
        SDL_DestroyRenderer(m_sdlRenderer);
    }
    if (m_sdlWindow != NULL)
    {
        SDL_DestroyWindow(m_sdlWindow);
    }
    SDL_Quit();
}


// Relay the decode thread's failure signal to this class's UI-facing signal.
void MediaPlayer::openFailed(QString strErr)
{
    emit error(strErr);
}


// Relay the decode thread's success signal (size + durations) to the UI.
void MediaPlayer::openSucceed(int w, int h, int64_t videoSeconds, int64_t audioSeconds)
{
    emit opened(w, h, videoSeconds, audioSeconds);
}


void MediaPlayer::run()
{
//    qDebug() << "MediaPlayer::run Thread id is:" << QThread::currentThreadId();
    int64_t videoLastPts = decodeThread->getVideoStartPts();
    int64_t videoStartPts = videoLastPts;
    AVRational videoTB = decodeThread->getVideoTimeBase();
    int64_t nTotalPts = decodeThread->getVideoTotalPts();

    QTime timer;
    int width = decodeThread->getVideoFrameWidth();
    int height = decodeThread->getVideoFrameHeight();
    bool bHasVideo = decodeThread->hasVideo();
    bool bHasAudio = decodeThread->hasAudio();

    m_sdlTexture=SDL_CreateTexture(m_sdlRenderer,/*SDL_PIXELFORMAT_BGRA32*/SDL_PIXELFORMAT_IYUV,SDL_TEXTUREACCESS_STATIC/*SDL_TEXTUREACCESS_STREAMING*/,width,height);

    //AudioPlayer *audioP = new AudioPlayer();
    audioThread->setDecodeThread(decodeThread);
    audioThread->setTotalPts(decodeThread->getAudioTotalPts());
    audioThread->setStartPts(decodeThread->getAudioStartPts());
    if(bHasAudio)
    {
        audioThread->start();
    }

    double dblVideoErr = 0.0, videoDelay = 0.0;
    int nVideoDelay = 0;

    AVFrame* pVideoFrame = NULL;
    m_status = TH_RUNNING;
    Thread_Status status;
    m_curTimeStamp = 0.0;

    while(bHasVideo)
    {
        m_mutex.lock();
        status = m_status;
        m_mutex.unlock();
        if(status == TH_STOPPED)
        {
            break;
        }
        if(status == TH_PAUSED)
        {
            msleep(200);
            continue;
        }

        timer.start();

        pVideoFrame = decodeThread->nextVideoFrame();
        if(pVideoFrame != NULL)
        {
            if(nTotalPts > 0 && pVideoFrame->pts > nTotalPts)
            {
                break;
            }

            SDL_UpdateYUVTexture(m_sdlTexture,NULL,pVideoFrame->data[0],pVideoFrame->linesize[0],pVideoFrame->data[1],pVideoFrame->linesize[1],pVideoFrame->data[2],pVideoFrame->linesize[2]);
            //SDL_UpdateTexture(m_sdlTexture,NULL,pVideoFrame->data[0],pVideoFrame->linesize[0]);
            SDL_RenderClear(m_sdlRenderer);
            SDL_RenderCopy(m_sdlRenderer,m_sdlTexture,NULL,NULL);
            m_curTimeStamp = 1000.0 * double(pVideoFrame->pts - videoStartPts) * double(videoTB.num) / double(videoTB.den);

            if(bHasAudio)
            {
                double audioStamp = audioThread->getCurPlayingTime();
                if(m_curTimeStamp > audioStamp && (m_curTimeStamp - audioStamp) < 500)
                {
                    msleep(int(m_curTimeStamp-audioStamp));
                }
            }
            else
            {
                videoDelay = 1000.0*double(pVideoFrame->pts-videoLastPts) * double(videoTB.num) / double(videoTB.den) - double(timer.elapsed());
                videoDelay = videoDelay > 0 ? videoDelay : 0.0;
                nVideoDelay = int(videoDelay);
                dblVideoErr += (videoDelay - nVideoDelay);
                if(dblVideoErr >= 1.0)
                {
                    nVideoDelay += 1;
                    dblVideoErr -= 1.0;
                }
                msleep(nVideoDelay);
            }
            SDL_RenderPresent(m_sdlRenderer);
            videoLastPts = pVideoFrame->pts;
        }
        else
        {
            msleep(20);
        }
    }
    m_status = TH_STOPPED;
}


double MediaPlayer::getCurTimeStamp()
{
    double ret = 0.0;

    m_mutex.lock();
    bool bHasVideo = decodeThread->hasVideo();
    bool bHasAudio = decodeThread->hasAudio();
    if(bHasVideo)
    {
        ret = m_curTimeStamp;
    }
    m_mutex.unlock();

    if(bHasAudio)
    {
        ret = audioThread->getCurPlayingTime();
    }

    return ret;
}


// Open a file/URL and start the decode thread plus this object's render
// loop; the audio thread is started from run() if an audio stream exists.
// NOTE(review): always returns true — setUrl()/readStream() failures are
// reported asynchronously via the error() signal, not via this return value.
bool MediaPlayer::open(QString strFilePath)
{
    // Tear down any session still running before switching media.
    if(m_status != TH_STOPPED)
    {
        stop();
    }
    m_strFileName = strFilePath;
    decodeThread->setUrl(strFilePath);
    decodeThread->start();
    start();
    return true;
}


void MediaPlayer::pause()
{
    audioThread->pause();
    decodeThread->pause();
    m_mutex.lock();
    m_status = TH_PAUSED;
    m_mutex.unlock();
}


void MediaPlayer::resume()
{
    audioThread->resume();
    decodeThread->resume();
    m_mutex.lock();
    m_status = TH_RUNNING;
    m_mutex.unlock();
}


void MediaPlayer::stop()
{
    decodeThread->stop();
    decodeThread->quit();
    if(!decodeThread->wait(3000))
    {
        decodeThread->terminate();
        decodeThread->wait();
    }

    audioThread->stop();
    audioThread->quit();
    if(!audioThread->wait(3000))
    {
        audioThread->terminate();
        audioThread->wait();
    }
    m_mutex.lock();
    m_status = TH_STOPPED;
    m_mutex.unlock();
}


// Forward mute state to the audio thread (video keeps playing normally).
void MediaPlayer::setMute(bool bMute)
{
    audioThread->setMute(bMute);
}


// Jump to the given second: drop queued PCM immediately so audio does not
// drain stale data, then ask the decode thread to perform the seek.
void MediaPlayer::seek(int64_t iSecond)
{
    audioThread->seek();
    decodeThread->seek(iSecond);
}


// Path/URL of the currently opened media.
QString MediaPlayer::getFileName()
{
    return m_strFileName;
}



复用时只需与MediaPlayer类交互。

首先声明两个变量:

MediaPlayer *m_player;
SDL_Window *m_sdlWindow;

在界面中放一个label控件,取名lbImage。

再初始化两个变量:

m_player = new MediaPlayer(ui->lbImage->winId());
m_sdlWindow = SDL_CreateWindowFrom((const void *)ui->lbImage->winId());

在适当的地方调用open、pause、resume、seek函数进行控制。

几个坑:

1、音频处理的AVFrame的duration与音频采样数量及频率计算结果不一致,这时候应以duration为准。这段代码就是计算实际需要的样本数,并处理累计误差,当误差大于1时,自动消除。

dblAudioSamples = double(m_timebase.num)*double(m_spec.freq)*double(pAudioFrame->duration)/double(m_timebase.den);
nAudioNeedSamples = int(dblAudioSamples);
dAudioErr += (dblAudioSamples - nAudioNeedSamples);
if(dAudioErr >= 1.0)
{
    nAudioNeedSamples++;
    dAudioErr--;
}
int out_buffer_size = av_samples_get_buffer_size(NULL, m_spec.channels, nAudioNeedSamples, outAudiofmt, 1);
SDL_QueueAudio(m_audioDeviceID,pcm_buffer,out_buffer_size);

2、调用SDL_OpenAudioDevice时第四个参数obtained务必不能设置成NULL,这个表示实际系统返回给你的声卡设置参数,跟硬件相关,所以跟你第三个参数传进去的可能不一样,如果出现这种情况,是出不来声音的。网上很多资源都忽略了这点。

3、音频数据传入使用了SDL_QueueAudio方式,没有用CallBack方式,这种方式更好理解。

4、音视频同步优先音频的时钟,因为音频播放时硬件根据采样频率来控制的,更精确,在没有音频时,才使用视频。

  • 2
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 2
    评论
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值