live555 RTSPClient management: this article implements a management class for RTSP clients, using the jthread threading library to drive multiple RTSPClient instances.
First, some background: live555 is a cross-platform, open-source C++ project that provides a streaming-media solution. It implements the standard streaming protocols such as RTP/RTCP, RTSP and SIP, and supports streaming, receiving and processing audio/video data in many encoding formats, including MPEG, H.263+, DV and JPEG video as well as several audio codecs. Thanks to its clean design, live555 is also easy to extend with support for other formats.
We start by building the live555 RTSP client session class (LiveRtspClientSession); the threading library used to drive it is jthread.
1. LiveRtspClientSession.h header code:
/******************************************************************************
* FILE: RtspClientSession.h
* Description:
* Interface for rtsp client session class.
*
* Modified Code History
* Mark Date By Modification Reason
*******************************************************************************
* 01 2013/5/28 songxw Initial creation.
******************************************************************************/
#if !defined(__LIVE_RTSP_CLIENT_SESSION_H__)
#define __LIVE_RTSP_CLIENT_SESSION_H__
using namespace std;
#include "TypeDef.h"
#include "ConfigMgr.h"
#include "Module.h"
#include "MsgStruct.h"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "RTSPCommon.hh"
#include "jthread.h"
using namespace jthread;
typedef enum
{
LIVE_RESP_NONE = 0,
LIVE_RESP_SUCESS = 1,
LIVE_RESP_FAILED = 2,
}LiveRtspRespType_E;
typedef void (*LivePlayCallBack)(int PlayHandle, unsigned int dwDataType, unsigned char *pBuffer, unsigned int dwBufSize, bool bMarker, long UserData);
//typedef void (*LivePlayCallBack)(void* clientData, int dataType, unsigned char*buffer, int size, int frameType);
#define RTSP_CLIENT_VERBOSITY_LEVEL 1 // by default, print verbose output from each "RTSPClient"
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE (2<<20)
class StreamClientState
{
public:
StreamClientState();
virtual ~StreamClientState();
public:
MediaSubsessionIterator* iter;
MediaSession* session;
MediaSubsession* subsession;
TaskToken streamTimerTask;
double duration;
//bool m_bRtspSucess;
LiveRtspRespType_E m_eRtspRespType;
long VideoHdl; // video handle, unique per stream
long AudioHdl; // audio handle, unique per stream
long UserData; // user data: a handle or an application pointer value
LivePlayCallBack MediaCBFunc; // media-stream callback function pointer
char m_RtspUrl[HTTP_URL_MAX_LEN+1];
bool m_bTcp; // request the stream over TCP
bool m_bMulticast; // receive the stream via multicast
bool m_bSendTeardown;
};
class ourRTSPClient: public RTSPClient
{
public:
static ourRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
int verbosityLevel = 0,
char const* applicationName = NULL,
portNumBits tunnelOverHTTPPortNum = 0);
protected:
ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum);
// called only by createNew();
virtual ~ourRTSPClient();
public:
StreamClientState scs;
};
class DummySink: public MediaSink
{
public:
static DummySink* createNew(UsageEnvironment& env,
MediaSubsession& subsession, // identifies the kind of data that's being received
char const* streamId,
const long videohdl, const long audiohdl,
LivePlayCallBack callback, const long &userdate); // identifies the stream itself (optional)
private:
DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId, const long videohdl, const long audiohdl, LivePlayCallBack callback, const long &userdate);
// called only by "createNew()"
virtual ~DummySink();
static void afterGettingFrame(void* clientData, unsigned frameSize,
unsigned numTruncatedBytes,
struct timeval presentationTime,
unsigned durationInMicroseconds);
void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime, unsigned durationInMicroseconds);
//virtual int StreamRecvCallBack(unsigned int dwDataType, unsigned char *pBuffer, unsigned int dwBufSize, bool bMarker, long UserData) = 0;
private:
// redefined virtual functions:
virtual Boolean continuePlaying();
private:
//u_int8_t RecvBuf[DUMMY_SINK_RECEIVE_BUFFER_SIZE+1];
u_int8_t* fReceiveBuffer;
MediaSubsession& fSubsession;
char* fStreamId;
long VideoHdl; // video handle, unique per stream
long AudioHdl; // audio handle, unique per stream
long UserData; // user data: a handle or an application pointer value
LivePlayCallBack MediaCBFunc; // media-stream callback function pointer
};
/* Rtsp client session interface class */
class CLiveRtspClientSession: public JThread
{
public:
CLiveRtspClientSession(int Chan);
~CLiveRtspClientSession(void);
void* Thread(void);
void ThreadQuit(bool bSendDown = true);
public:
int LiveRealPlay(char *RtspUrl, LivePlayCallBack PlayCBFunc, const long& userdate, bool btcp, bool bMulticast);
int LiveStop(void);
private:
int openURL(UsageEnvironment& env, char const* progName, char const* rtspURL);
static void continueAfterOPTIONS(RTSPClient* rtspClient, int resultCode, char* resultString);
static void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString);
static void setupNextSubsession(RTSPClient* rtspClient);
static void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString);
static void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString);
static void subsessionAfterPlaying(void* clientData);
static void subsessionByeHandler(void* clientData);
static void streamTimerHandler(void* clientData);
static void sessionAfterGetParam(RTSPClient* rtspClient, int resultCode, char* resultString);
static void shutdownStream(RTSPClient* rtspClient, int exitCode = 1);
private:
static int LiveIndex;
static CCritSec CritSec_LiveIndex;
int LiveIndexAdd()
{
CAutoLock AutoLock(&CritSec_LiveIndex);
return ++LiveIndex;
}
//int StreamRecvCallBack(unsigned int dwDataType, unsigned char *pBuffer, unsigned int dwBufSize, bool bMarker, long UserData);
private:
UsageEnvironment* env;
RTSPClient* rtspClient;
//static char eventLoopWatchVariable;
char eventLoopWatchVariable;
public:
int Channel;
bool bAudio;
long VideoHdl; // video handle, unique per stream
long AudioHdl; // audio handle, unique per stream
// long UserData; // user data: a handle or an application pointer value
// LivePlayCallBack MediaCBFunc; // media-stream callback function pointer
};
#endif //#if !defined(__LIVE_RTSP_CLIENT_SESSION_H__)
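With this header in place, an application only needs a LivePlayCallBack function plus the LiveRealPlay/LiveStop pair. The snippet below is a minimal usage sketch and is not part of the original source: MyFrameHandler, the channel number and the RTSP URL are illustrative placeholders, and the callback merely counts the bytes it receives.
#include "LiveRtspClientSession.h"
#include <cstdio>
#include <unistd.h>

// Illustrative callback matching the LivePlayCallBack typedef: it only counts
// the bytes delivered. dwDataType is 0 for video (an Annex-B start code is
// already prepended by DummySink) and 1 for audio.
static void MyFrameHandler(int PlayHandle, unsigned int dwDataType,
                           unsigned char *pBuffer, unsigned int dwBufSize,
                           bool bMarker, long UserData)
{
    unsigned long long *pTotal = (unsigned long long *)UserData;
    *pTotal += dwBufSize;
    printf("handle=%d type=%u size=%u marker=%d total=%llu\n",
           PlayHandle, dwDataType, dwBufSize, (int)bMarker, *pTotal);
    (void)pBuffer;
}

int main()
{
    unsigned long long total = 0;
    CLiveRtspClientSession session(0 /* channel */);

    // LiveRealPlay blocks for up to about one second waiting for the PLAY
    // response and returns the (positive) video handle on success, -1 on failure.
    int hdl = session.LiveRealPlay((char *)"rtsp://192.168.1.100/stream1",
                                   MyFrameHandler, (long)&total,
                                   true /* TCP */, false /* multicast */);
    if (hdl < 0)
        return -1;

    sleep(10);          // receive frames for a while
    session.LiveStop(); // stops the event loop and sends TEARDOWN
    return 0;
}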
2. LiveRtspClientSession.cpp source code:
////////////////////////////////////////////////////////////////////////////////
// Copyright notice, 2010-2020
////////////////////////////////////////////////////////////////////////////////
// File        : LiveRtspClientSession.cpp
// Author      :
// Version     : 3.0
// Date        : 2016-12-22
// Description : RTSP client session wrapped around Live555
// Notes       :
//
// Revision history:
// Date(YYYY-MM-DD)   Version   Author    Change
// 2016-12-22         3.0       songxw    Initial creation
////////////////////////////////////////////////////////////////////////////////
#include "LiveRtspClientSession.h"
/******************************************************************************
* Description : StreamClientState - per-stream state shared with the RTSP
*               response handlers (session, subsession iterator, handles,
*               callback pointer and transport options).
*****************************************************************************/
StreamClientState::StreamClientState()
: iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL), duration(0.0)
{
m_eRtspRespType = LIVE_RESP_NONE;
VideoHdl = -1;
AudioHdl = -1;
memset(m_RtspUrl, 0, sizeof(m_RtspUrl));
m_bTcp = false;
m_bMulticast = false;
m_bSendTeardown = true;
}
StreamClientState::~StreamClientState()
{
delete iter;
if (session != NULL) {
// We also need to delete "session", and unschedule "streamTimerTask" (if set)
UsageEnvironment& env = session->envir(); // alias
env.taskScheduler().unscheduleDelayedTask(streamTimerTask);
Medium::close(session);
}
}
ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, char const* rtspURL, int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
{
return new ourRTSPClient(env, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum);
}
ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
: RTSPClient(env,rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1)
{
}
ourRTSPClient::~ourRTSPClient()
{
}
/******************************************************************************
* Description : DummySink - the MediaSink that receives the stream data and
*               forwards every frame to the user callback.
*****************************************************************************/
DummySink::DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId,
const long videohdl, const long audiohdl,
LivePlayCallBack callback, const long &userdate)
: MediaSink(env),fSubsession(subsession)
{
fStreamId = strDup(streamId);
fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE + 4]; // +4: room for the start code prepended before the payload written at offset 4
VideoHdl = videohdl;
AudioHdl = audiohdl;
MediaCBFunc = callback;
UserData = userdate;
}
DummySink::~DummySink()
{
delete[] fStreamId;
if (fReceiveBuffer != NULL)
{
delete[] fReceiveBuffer;
fReceiveBuffer = NULL;
}
}
DummySink* DummySink::createNew(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId,
const long videohdl, const long audiohdl,
LivePlayCallBack callback, const long &userdate)
{
return new DummySink(env, subsession, streamId, videohdl, audiohdl, callback, userdate);
}
void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime, unsigned durationInMicroseconds)
{
DummySink* sink = (DummySink*)clientData;
sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
}
void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime, unsigned durationInMicroseconds)
{
if (fReceiveBuffer == NULL)
{
return;
}
// Forward the received frame to the user callback
bool bMarker = true;
u_int8_t *pRecvBuffer = NULL;
if (strcmp(fSubsession.mediumName(), "video") == 0) // video subsession
{
// prepend an Annex-B start code in front of the payload written at offset 4
fReceiveBuffer[0] = 0x00;
fReceiveBuffer[1] = 0x00;
fReceiveBuffer[2] = 0x00;
fReceiveBuffer[3] = 0x01;
pRecvBuffer = fReceiveBuffer;
// SPS (0x67) / PPS (0x68) NAL units are not complete frames, so clear the marker
if (frameSize > 2 && (pRecvBuffer[4] == 0x67 || pRecvBuffer[4] == 0x68))
{
bMarker = false;
}
// if the payload already begins with a start code, skip the prepended one
if ((pRecvBuffer[4] == 0x00) && (pRecvBuffer[5] == 0x00) && (pRecvBuffer[6] == 0x00) && (pRecvBuffer[7] == 0x01))
{
pRecvBuffer += 4;
frameSize -= 4;
}
MediaCBFunc(VideoHdl, 0, pRecvBuffer, frameSize+4, bMarker, UserData);
}
else if ((strcmp(fSubsession.mediumName(), "audio") == 0)) // audio subsession
{
pRecvBuffer = fReceiveBuffer + 4;
MediaCBFunc(AudioHdl, 1, pRecvBuffer, frameSize, bMarker, UserData);
}
// Then continue, to request the next frame of data:
continuePlaying();
}
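// Request the next frame. Note that the payload is written 4 bytes into
// fReceiveBuffer so that a 4-byte Annex-B start code can be prepended for video.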
Boolean DummySink::continuePlaying()
{
if (fSource == NULL)
return False; // sanity check (should not happen)
// Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives:
fSource->getNextFrame(fReceiveBuffer+4, DUMMY_SINK_RECEIVE_BUFFER_SIZE,
afterGettingFrame, this,
onSourceClosure, this);
return True;
}
//RTPSource
CLiveRtspClientSession::CLiveRtspClientSession(int Chan)
{
Channel = Chan;
VideoHdl = LiveIndexAdd();
AudioHdl = VideoHdl;
// initialize the live555 state
env = NULL;
rtspClient = NULL;
eventLoopWatchVariable = 0;
}
CLiveRtspClientSession::~CLiveRtspClientSession(void)
{
}
int CLiveRtspClientSession::LiveIndex = 0;
CCritSec CLiveRtspClientSession::CritSec_LiveIndex;
// JThread entry point
void* CLiveRtspClientSession::Thread(void)
{
ThreadStarted();
// run the live555 event loop until eventLoopWatchVariable is set to 1
eventLoopWatchVariable = 0;
env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
DBGPrint(M_RtspClientLib, BREAK_LEVEL, "CLiveRtspClientSession::Thread: event loop exited.");
return NULL;
}
void CLiveRtspClientSession::ThreadQuit(bool bSendDown)
{
// ask the event loop to exit
eventLoopWatchVariable = 1;
DBGPrint(M_RtspClientLib, BREAK_LEVEL, "CLiveRtspClientSession::ThreadQuit: quit requested.");
if (bSendDown)
{
usleep(10*1000);
shutdownStream(rtspClient);
}
}
int CLiveRtspClientSession::LiveRealPlay(char *RtspUrl, LivePlayCallBack PlayCBFunc, const long& userdate, bool btcp, bool bMulticast)
{
// if the previous session has been asked to quit but has not finished yet, fail immediately
if(eventLoopWatchVariable == 1)
{
DBGPrint(M_RtspClientLib, BREAK_LEVEL, "%s: eventLoopWatchVariable,need wait thread quit!!!", __FUNCTION__);
return -1;
}
if(NULL == env)
{
// create the scheduler and environment only the first time this session is used
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
}
int iRet = openURL(*env, "rtsplib", RtspUrl);
if(iRet < 0)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL, "%s: openURL rtspurl<%s> Failed!", __FUNCTION__, RtspUrl);
return -1;
}
// fill in the per-stream state
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs;
scs.m_eRtspRespType = LIVE_RESP_NONE;
scs.VideoHdl = VideoHdl;
scs.AudioHdl = AudioHdl;
scs.UserData = userdate;
scs.MediaCBFunc = PlayCBFunc;
strncpy2(scs.m_RtspUrl, RtspUrl, HTTP_URL_MAX_LEN);
scs.m_bTcp = btcp;
scs.m_bMulticast = bMulticast;
// start the event-loop thread
Start();
// wait up to 1 second for the RTSP handshake result
bool brealplaystatus = false;
int resulttimes = 1000;
while(resulttimes--)
{
// the server replied with a failure
if(LIVE_RESP_FAILED == scs.m_eRtspRespType)
{
break;
}
// the server accepted PLAY
if(LIVE_RESP_SUCESS == scs.m_eRtspRespType)
{
brealplaystatus = true;
break;
}
usleep(1000);
}
if(false == brealplaystatus)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL, "%s: the rtsp<%s> realplay Failed (no success response within the timeout)!", __FUNCTION__, RtspUrl);
ThreadQuit(false);
return -1;
}
DBGPrint(M_RtspClientLib, BREAK_LEVEL, "%s:eventLoopWatchVariable:%d , Rtsp<%s> is Success VideoHdl<%ld>, btcp<%s>, bMulticast<%s>!", __FUNCTION__, \
eventLoopWatchVariable, RtspUrl, VideoHdl, btcp?"true":"false", bMulticast?"true":"false");
return VideoHdl;
}
int CLiveRtspClientSession::LiveStop(void)
{
DBGPrint(M_RtspClientLib, BREAK_LEVEL, "%s: eventLoopWatchVariable:%d ,Stop Live555 Rtsp Client!!!", __FUNCTION__, eventLoopWatchVariable);
ThreadQuit();
return 0;
}
int CLiveRtspClientSession::openURL(UsageEnvironment& env, char const* progName, char const* rtspURL)
{
Authenticator authenticator;
bool bsetauth = false;
if (NULL == rtspClient)
{
char username[64+1] = {0};
char passwd[64+1] = {0};
char url[128+1] = {0};
char const* colonPasswordStart = NULL;
char const* from = rtspURL+7; // skip over the "rtsp://" prefix
char const* p = NULL;
for (p=from; *p != '\0' && *p != '/'; ++p)
{
if (*p == ':' && colonPasswordStart == NULL)
{
colonPasswordStart = p;
}
else if (*p == '@')
{
// We found <username> (and perhaps <password>). Copy them into newly-allocated result strings:
if (colonPasswordStart == NULL)
{
colonPasswordStart = p;
}
char const* usernameStart = from;
unsigned int usernameLen = colonPasswordStart - usernameStart;
memcpy(username, usernameStart, usernameLen);
char const* passwordStart = colonPasswordStart;
if (passwordStart < p)
{
++passwordStart; // skip over the ':'
}
unsigned passwordLen = p - passwordStart;
memcpy(passwd, passwordStart, passwordLen);
from = p + 1; // skip over the '@'
sprintf(url, "rtsp://%s", from);
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:++++++++++++++++rtspURL:%s, username:%s, passwd:%s, url:%s\n", __FUNCTION__, rtspURL, username, passwd, url);
authenticator.setUsernameAndPassword(username, passwd);
bsetauth = true;
break;
}
}
if (strlen(url) < 7)
{
// no embedded credentials were found: copy the original URL (bounded)
strncpy(url, rtspURL, sizeof(url)-1);
}
rtspClient = ourRTSPClient::createNew(env, url, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
}
if (rtspClient == NULL)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL, "%s: createNew rtspurl<%s> Failed!", __FUNCTION__, rtspURL);
return -1;
}
rtspClient->sendOptionsCommand(continueAfterOPTIONS, bsetauth?&authenticator:NULL);
//rtspClient->sendDescribeCommand(continueAfterDESCRIBE, bsetauth?&authenticator:NULL);
return 0;
}
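// RTSP handshake callback chain: OPTIONS -> DESCRIBE -> SETUP (one per
// subsession) -> PLAY; after PLAY, GET_PARAMETER is used as a periodic
// keepalive when the server advertises support for it.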
void CLiveRtspClientSession::continueAfterOPTIONS(RTSPClient* rtspClient, int resultCode, char* resultString)
{
Boolean success = False;
Boolean serverSupportsGetParameter = False;
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
scs.m_eRtspRespType = LIVE_RESP_FAILED;
do {
if (resultCode != 0)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"%s:Failed to get an OPTIONS response:%s ", __FUNCTION__, resultString);
break;
}
serverSupportsGetParameter = RTSPOptionIsSupported("GET_PARAMETER", resultString);
// the server supports GET_PARAMETER, so schedule it as a periodic keepalive
if (serverSupportsGetParameter)
{
scs.duration = 60; // keepalive interval in seconds
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:++++++serverSupportsGetParameter, need to timer send GetParameter!!!", __FUNCTION__);
}
success = True;
scs.m_eRtspRespType = LIVE_RESP_SUCESS;
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:0.RtspUrl<%s> Recv OPTIONS Success resultCode<%d--%s>!!!", __FUNCTION__, scs.m_RtspUrl, resultCode, resultString);
rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
break;
} while (0);
delete[] resultString;
if (success == False)
{
// An unrecoverable error occurred with this stream.
shutdownStream(rtspClient);
}
}
void CLiveRtspClientSession::continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString)
{
UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
do {
if (resultCode != 0)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"%s:Failed to get a SDP description:%s ", __FUNCTION__, resultString);
delete[] resultString;
break;
}
char* const sdpDescription = resultString;
//DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s: Got a SDP description:%s",__FUNCTION__, sdpDescription );
// Create a media session object from this SDP description:
scs.session = MediaSession::createNew(env, sdpDescription);
delete[] sdpDescription; // because we don't need it anymore
if (scs.session == NULL)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"%s: Failed to create a MediaSession object from the SDP description:", __FUNCTION__);
break;
}
else if (!scs.session->hasSubsessions())
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"This session has no media subsessions (i.e., no \"m=\" lines)\n");
break;
}
// Then, create and set up our data source objects for the session. We do this by iterating over the session's 'subsessions',
// calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
// (Each 'subsession' will have its own data source.)
scs.iter = new MediaSubsessionIterator(*scs.session);
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:1.RtspUrl<%s> Recv DESCRIBE Success!!", __FUNCTION__, scs.m_RtspUrl);
setupNextSubsession(rtspClient);
return;
} while (0);
scs.m_eRtspRespType = LIVE_RESP_FAILED;
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"%s:1.RtspUrl<%s> Recv DESCRIBE Failed resultCode<%d>!!!", __FUNCTION__, scs.m_RtspUrl, resultCode); // resultString has already been freed above
// An unrecoverable error occurred with this stream.
shutdownStream(rtspClient);
}
void CLiveRtspClientSession::setupNextSubsession(RTSPClient* rtspClient)
{
//UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
scs.subsession = scs.iter->next();
if (scs.subsession != NULL)
{
if (!scs.subsession->initiate())
{
// env << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n";
setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
}
else
{
//env << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (";
if (scs.subsession->rtcpIsMuxed())
{
//env << "client port " << scs.subsession->clientPortNum();
}
else
{
//env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
}
//env << ")\n";
// Continue setting up this subsession, by sending a RTSP "SETUP" command:
rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, scs.m_bTcp, scs.m_bMulticast);
}
return;
}
// We've finished setting up all of the subsessions. Now, send a RTSP "PLAY" command to start the streaming:
if (scs.session->absStartTime() != NULL)
{
// Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
}
else
{
//scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
}
}
void CLiveRtspClientSession::continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString)
{
do {
UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
if (resultCode != 0)
{
//env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n";
break;
}
//env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession (";
if (scs.subsession->rtcpIsMuxed())
{
//env << "client port " << scs.subsession->clientPortNum();
}
else
{
//env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
}
//env << ")\n";
// Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
// (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
// after we've sent a RTSP "PLAY" command.)
//scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url(), VideoHdl, AudioHdl, MediaCBFunc, UserData);
scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url(), scs.VideoHdl, scs.AudioHdl, scs.MediaCBFunc, scs.UserData);
// perhaps use your own custom "MediaSink" subclass instead
if (scs.subsession->sink == NULL)
{
//env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
//<< "\" subsession: " << env.getResultMsg() << "\n";
break;
}
//DBGPrint(M_RtspClientLib, BREAK_LEVEL,"Created a data sink for the subsession:%p", scs.subsession);
scs.subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession
scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
subsessionAfterPlaying, scs.subsession);
// Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
if (scs.subsession->rtcpInstance() != NULL)
{
scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession);
}
} while (0);
delete[] resultString;
// Set up the next subsession, if any:
setupNextSubsession(rtspClient);
}
void CLiveRtspClientSession::continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString)
{
Boolean success = False;
UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
do {
if (resultCode != 0)
{
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"Failed to start playing Code<%d> session:%s ", resultCode, resultString);
break;
}
// Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end
// using a RTCP "BYE"). This is optional. If, instead, you want to keep the stream active - e.g., so you can later
// 'seek' back within it and do another RTSP "PLAY" - then you can omit this code.
// (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.)
if (scs.duration > 0)
{
unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
}
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:Recv Play Success Use Time<%f>!", __FUNCTION__, scs.duration);
success = True;
// mark the RTSP setup as successful
scs.m_eRtspRespType = LIVE_RESP_SUCESS;
} while (0);
delete[] resultString;
if (!success)
{
// An unrecoverable error occurred with this stream.
shutdownStream(rtspClient);
}
}
void CLiveRtspClientSession::subsessionAfterPlaying(void* clientData)
{
MediaSubsession* subsession = (MediaSubsession*)clientData;
RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);
// Begin by closing this subsession's stream:
Medium::close(subsession->sink);
subsession->sink = NULL;
// Next, check whether *all* subsessions' streams have now been closed:
MediaSession& session = subsession->parentSession();
MediaSubsessionIterator iter(session);
while ((subsession = iter.next()) != NULL)
{
if (subsession->sink != NULL)
return; // this subsession is still active
}
// All subsessions' streams have now been closed, so shutdown the client:
shutdownStream(rtspClient);
}
void CLiveRtspClientSession::subsessionByeHandler(void* clientData)
{
MediaSubsession* subsession = (MediaSubsession*)clientData;
//RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
//UsageEnvironment& env = rtspClient->envir(); // alias
//env << *rtspClient << "Received RTCP \"BYE\" on \"" << *subsession << "\" subsession\n";
// Now act as if the subsession had closed:
subsessionAfterPlaying(subsession);
}
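// Note: unlike live555's testRTSPClient demo (from which this code appears to
// be adapted), the duration timer does not shut the stream down; it sends a
// GET_PARAMETER keepalive and is re-armed in sessionAfterGetParam().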
void CLiveRtspClientSession::streamTimerHandler(void* clientData)
{
ourRTSPClient* rtspClient = (ourRTSPClient*)clientData;
StreamClientState& scs = rtspClient->scs; // alias
scs.streamTimerTask = NULL;
scs.m_bSendTeardown = false; // if the keepalive gets no reply, do not send TEARDOWN on shutdown
// Shut down the stream:
//shutdownStream(rtspClient);
rtspClient->sendGetParameterCommand(*(rtspClient->scs.session), CLiveRtspClientSession::sessionAfterGetParam, NULL);
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s:+++++sendGetParameterCommand, m_RtspUrl<%s>, Start KeepAlive", __FUNCTION__, scs.m_RtspUrl);
}
void CLiveRtspClientSession::sessionAfterGetParam(RTSPClient* rtspClient, int resultCode, char* resultString)
{
Boolean success = False;
UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
do {
if (resultCode != 0)
{
DBGPrint(M_RtspClientLib, ERROR_LEVEL,"Failed GET_PARAMETER keepalive Code<%d> session:%s ", resultCode, resultString);
break;
}
// Re-arm the keepalive timer so that another GET_PARAMETER is sent after the
// same interval (scs.duration seconds).
if (scs.duration > 0)
{
unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
}
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s: Get Device Param Success!", __FUNCTION__);
success = True;
scs.m_bSendTeardown = true;
} while (0);
delete[] resultString;
if (!success)
{
// An unrecoverable error occurred with this stream.
shutdownStream(rtspClient);
}
}
void CLiveRtspClientSession::shutdownStream(RTSPClient* rtspClient, int exitCode)
{
// UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s: Closing <%s>, m_bSendTeardown<%d>!", __FUNCTION__, scs.m_RtspUrl, (int)scs.m_bSendTeardown);
if (scs.m_bSendTeardown == false)
{
return;
}
// First, check whether any subsessions have still to be closed:
if (scs.session != NULL)
{
Boolean someSubsessionsWereActive = False;
MediaSubsessionIterator iter(*scs.session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL)
{
if (subsession->sink != NULL)
{
Medium::close(subsession->sink);
subsession->sink = NULL;
if (subsession->rtcpInstance() != NULL)
{
subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
}
someSubsessionsWereActive = True;
}
}
DBGPrint(M_RtspClientLib, BREAK_LEVEL,"%s: Closing <%s> the stream Success, m_bSendTeardown<%d>, someSubsessionsWereActive<%d>!", __FUNCTION__, scs.m_RtspUrl, (int)scs.m_bSendTeardown, (int)someSubsessionsWereActive);
//if (someSubsessionsWereActive)
{
// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
// Don't bother handling the response to the "TEARDOWN".
rtspClient->sendTeardownCommand(*scs.session, NULL);
}
}
Medium::close(rtspClient);
scs.m_bSendTeardown = false;
}
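The opening of this article refers to a management class that drives several rtspclient sessions, one jthread each, but that class itself is not listed above. The sketch below outlines one possible shape for it; CRtspClientMgr and its method names are hypothetical and this is not the original implementation. It keeps one CLiveRtspClientSession per channel in a std::map guarded by the same CCritSec/CAutoLock helpers already used in the session class (assumed to be available through the same headers).
#include "LiveRtspClientSession.h"
#include <map>

// Illustrative manager sketch (not the original source): one
// CLiveRtspClientSession per channel, created on demand and torn down on stop.
class CRtspClientMgr // hypothetical name
{
public:
    ~CRtspClientMgr()
    {
        CAutoLock lock(&m_Lock);
        for (std::map<int, CLiveRtspClientSession*>::iterator it = m_Sessions.begin();
             it != m_Sessions.end(); ++it)
        {
            it->second->LiveStop();
            delete it->second;
        }
        m_Sessions.clear();
    }

    // Start (or restart) channel 'chan'; returns the play handle or <0 on error.
    int StartPlay(int chan, char *rtspUrl, LivePlayCallBack cb, long userData,
                  bool bTcp, bool bMulticast)
    {
        CAutoLock lock(&m_Lock);
        StopPlayLocked(chan);
        CLiveRtspClientSession *session = new CLiveRtspClientSession(chan);
        int hdl = session->LiveRealPlay(rtspUrl, cb, userData, bTcp, bMulticast);
        if (hdl < 0)
        {
            delete session;
            return -1;
        }
        m_Sessions[chan] = session;
        return hdl;
    }

    int StopPlay(int chan)
    {
        CAutoLock lock(&m_Lock);
        return StopPlayLocked(chan);
    }

private:
    int StopPlayLocked(int chan)
    {
        std::map<int, CLiveRtspClientSession*>::iterator it = m_Sessions.find(chan);
        if (it == m_Sessions.end())
            return -1;
        it->second->LiveStop();
        delete it->second; // see note below about waiting for the thread
        m_Sessions.erase(it);
        return 0;
    }

    CCritSec m_Lock;
    std::map<int, CLiveRtspClientSession*> m_Sessions;
};
A production version would also need to wait for each session's JThread to finish (for example by polling the jthread running state) before deleting the session object; that detail is left out of the sketch.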