Memory management when using openRTSP as a tcpmp plugin

The listing below is essentially a stripped-down playCommon.cpp from openRTSP, rearranged so a tcpmp plugin can drive the live555 client through three plain C entry points (Init / ConnServer / UnInit), with the cleanup concentrated in StopRtsp() and close_rtsp().

#include "playCommon.h"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include <assert.h>
//--------------------------------------------------
Boolean sendOptionsRequest = True;
Boolean sendOptionsRequestOnly = False;
portNumBits tunnelOverHTTPPortNum = 0;
Authenticator* ourAuthenticator = NULL;
//--------------------------------------------------
void continueAfterOPTIONS(RTSPClient* client, int resultCode, char* resultString);
void continueAfterDESCRIBE(RTSPClient* client, int resultCode, char* resultString);
void continueAfterSETUP(RTSPClient* client, int resultCode, char* resultString);
void continueAfterPLAY(RTSPClient* client, int resultCode, char* resultString);
void continueAfterTEARDOWN(RTSPClient* client, int resultCode, char* resultString);
void subsessionAfterPlaying(void* clientData);
void subsessionByeHandler(void* clientData);
void sessionAfterPlaying();
void setupStreams();
void failFun();
void StopRtsp();
void close_rtsp();
void teardownRTSPorSIPSession();
//--------------------------------------------------
typedef struct openrtsp_t
{
openrtsp_t()
{
   scheduler = NULL;
   env = NULL;
   rtspClient = NULL;
   session = NULL;
   setupIter = NULL;
   desiredPortNum = 0; // must be initialized before it is used in continueAfterDESCRIBE()
   duration = 0;
   durationSlop = -1.0;
   initialSeekTime = 0.0f;
   scale = 1.0f;
   endTime = 0;
   hThread = NULL;
}
TaskScheduler* scheduler;
UsageEnvironment* env;
RTSPClient* rtspClient;
MediaSession* session;
MediaSubsessionIterator* setupIter;
unsigned short desiredPortNum;
double duration;
double durationSlop; // extra seconds to play at the end
double initialSeekTime;
float scale;
double endTime;
HANDLE hThread;
}openrtsp_t;
openrtsp_t g_openrtsp;
void Init()
{
g_openrtsp.scheduler = BasicTaskScheduler::createNew();
g_openrtsp.env = BasicUsageEnvironment::createNew(*g_openrtsp.scheduler);
}
void UnInit()
{
StopRtsp();
}
DWORD WINAPI rtspThread(LPVOID lp) // WINAPI matches LPTHREAD_START_ROUTINE's calling convention
{
g_openrtsp.env->taskScheduler().doEventLoop(); // does not return
return 0;
}
int ConnServer(char* strUrl)
{
g_openrtsp.rtspClient = (RTSPClient *)createClient(*g_openrtsp.env, strUrl, 1, "openrtsp.exe");
if (g_openrtsp.rtspClient == NULL) {
   return false;
}
if (sendOptionsRequest) {
   // Begin by sending an "OPTIONS" command:
   getOptions(continueAfterOPTIONS);
} else {
   continueAfterOPTIONS(NULL, 0, NULL);
}
// All subsequent activity takes place within the event loop:
g_openrtsp.hThread = CreateThread(0,0,(LPTHREAD_START_ROUTINE)rtspThread,NULL,0/*CREATE_SUSPENDED*/,0);
return true;
}
// Hook for the tcpmp plugin: received frame data is presumably handed over here.
void rtsp_data_deal(unsigned char* /*data*/, unsigned int /*len*/)
{
}
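// A minimal sketch (an assumption, not part of the original post): instead of the
// FileSinks created in setupStreams(), a custom MediaSink could hand each received
// frame to rtsp_data_deal() so the tcpmp plugin gets the data in memory. The class
// name "MemorySink" and the 100000-byte buffer size are made up for illustration.
class MemorySink: public MediaSink {
public:
static MemorySink* createNew(UsageEnvironment& env, unsigned bufferSize = 100000) {
   return new MemorySink(env, bufferSize);
}
protected:
MemorySink(UsageEnvironment& env, unsigned bufferSize)
   : MediaSink(env), fBufferSize(bufferSize) {
   fBuffer = new unsigned char[bufferSize];
}
virtual ~MemorySink() { delete[] fBuffer; }
static void afterGettingFrame(void* clientData, unsigned frameSize,
   unsigned /*numTruncatedBytes*/, struct timeval /*presentationTime*/,
   unsigned /*durationInMicroseconds*/) {
   MemorySink* sink = (MemorySink*)clientData;
   rtsp_data_deal(sink->fBuffer, frameSize); // hand the frame to the plugin
   sink->continuePlaying();                  // then ask the source for the next frame
}
virtual Boolean continuePlaying() {
   if (fSource == NULL) return False;
   fSource->getNextFrame(fBuffer, fBufferSize, afterGettingFrame, this,
    onSourceClosure, this);
   return True;
}
private:
unsigned char* fBuffer;
unsigned fBufferSize;
};
// Possible use inside setupStreams(): subsession->sink = MemorySink::createNew(*g_openrtsp.env);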
void failFun()
{
g_openrtsp.scheduler->StopEventLoop();
}
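// NOTE: as far as the stock live555 TaskScheduler goes, there is no StopEventLoop();
// the call above appears to rely on a locally modified BasicTaskScheduler. The
// standard mechanism is the watch-variable overload of doEventLoop(), roughly
// (sketch only, names assumed):
//
//   static char volatile g_eventLoopWatchVariable = 0;
//
//   DWORD WINAPI rtspThread(LPVOID)
//   {
//      g_openrtsp.env->taskScheduler().doEventLoop(&g_eventLoopWatchVariable);
//      return 0; // returns once g_eventLoopWatchVariable becomes non-zero
//   }
//
//   void failFun() { g_eventLoopWatchVariable = 1; }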
//void getOptions(RTSPClient::responseHandler* afterFunc) { 
// g_openrtsp.rtspClient->sendOptionsCommand(afterFunc, ourAuthenticator);
//}
//
//Medium* createClient(UsageEnvironment& env, char const* url, int verbosityLevel, char const* applicationName) {
// extern portNumBits tunnelOverHTTPPortNum;
// return ourRTSPClient = RTSPClient::createNew(env, url, verbosityLevel, applicationName, tunnelOverHTTPPortNum);
//}


void continueAfterPLAY(RTSPClient*, int resultCode, char* resultString) {
delete[] resultString; // the response handler owns resultString
if (resultCode != 0) {
   failFun();
   return;
}
/*
if (qosMeasurementIntervalMS > 0) {
   // Begin periodic QOS measurements:
   beginQOSMeasurement();
}
// Figure out how long to delay (if at all) before shutting down, or
// repeating the playing
Boolean timerIsBeingUsed = False;
double secondsToDelay = duration;
if (duration > 0) {
   timerIsBeingUsed = True;
   double absScale = scale > 0 ? scale : -scale; // ASSERT: scale != 0
   secondsToDelay = duration/absScale + durationSlop;
   int64_t uSecsToDelay = (int64_t)(secondsToDelay*1000000.0);
   sessionTimerTask = env->taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)sessionTimerHandler, (void*)NULL);
}
char const* actionString
   = createReceivers? "Receiving streamed data":"Data is being streamed";
if (timerIsBeingUsed) {
   *env << actionString
    << " (for up to " << secondsToDelay
    << " seconds)...\n";
} else {
#ifdef USE_SIGNALS
   pid_t ourPid = getpid();
   *env << actionString
    << " (signal with \"kill -HUP " << (int)ourPid
    << "\" or \"kill -USR1 " << (int)ourPid
    << "\" to terminate)...\n";
#else
   *env << actionString << "...\n";
#endif
}
// Watch for incoming packets (if desired):
checkForPacketArrival(NULL);
checkInterPacketGaps(NULL);
*/
}
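// Sketch (assumed, mirroring the commented-out playCommon code above, where
// sessionTimerTask/sessionTimerHandler are otherwise undefined in this file):
// schedule a one-shot task so playback stops after the requested duration.
//
//   static TaskToken sessionTimerTask = NULL;
//   static void sessionTimerHandler(void* /*clientData*/) { failFun(); }
//
//   // in continueAfterPLAY, when g_openrtsp.duration > 0:
//   double absScale = g_openrtsp.scale > 0 ? g_openrtsp.scale : -g_openrtsp.scale;
//   double secondsToDelay = g_openrtsp.duration / absScale + g_openrtsp.durationSlop;
//   sessionTimerTask = g_openrtsp.env->taskScheduler().scheduleDelayedTask(
//      (int64_t)(secondsToDelay * 1000000.0), (TaskFunc*)sessionTimerHandler, NULL);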
void continueAfterSETUP(RTSPClient*, int resultCode, char* resultString) {
delete[] resultString; // free the SETUP response string
if (resultCode != 0) {
   failFun();
   return;
}
// Set up the next subsession, if any:
setupStreams();
}
void continueAfterOPTIONS(RTSPClient*, int resultCode, char* resultString) {
delete[] resultString; // delete[] on NULL is a no-op, so no need to test first
if (sendOptionsRequestOnly) 
{
   if (resultCode != 0) {
    failFun();
   }
   return;
}
// Next, get a SDP description for the stream:
getSDPDescription(continueAfterDESCRIBE);
}
void continueAfterDESCRIBE(RTSPClient*, int resultCode, char* resultString) 
{
if (resultCode != 0) {
   delete[] resultString; // free the error string before bailing out
   failFun();
   return;
}
char* sdpDescription = resultString;


// Create a media session object from this SDP description:
g_openrtsp.session = MediaSession::createNew(*g_openrtsp.env, sdpDescription);
delete[] sdpDescription;
if (g_openrtsp.session == NULL) {
  
   failFun();
   return;
} else if (!g_openrtsp.session->hasSubsessions()) {
   failFun();
   return;
}
// Then, setup the "RTPSource"s for the session:
MediaSubsessionIterator iter(*g_openrtsp.session);
MediaSubsession *subsession;
Boolean madeProgress = False;
char const* singleMediumToTest = "xxxxx";
while ((subsession = iter.next()) != NULL) {
  
   if (g_openrtsp.desiredPortNum != 0) {
    subsession->setClientPortNum(g_openrtsp.desiredPortNum);
    g_openrtsp.desiredPortNum += 2;
   }
  
   if (!subsession->initiate(-1)) {
    // Initiation failed (e.g. the RTP/RTCP ports could not be created); skip this subsession.
   } else {
    madeProgress = True;
    if (subsession->rtpSource() != NULL) {
     // Because we're saving the incoming data, rather than playing
     // it in real time, allow an especially large time threshold
     // (1 second) for reordering misordered incoming packets:
     unsigned const thresh = 1000000; // 1 second
     subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);
     // Set the RTP source's OS socket buffer size as appropriate - either if we were explicitly asked (using -B),
     // or if the desired FileSink buffer size happens to be larger than the current OS socket buffer size.
     // (The latter case is a heuristic, on the assumption that if the user asked for a large FileSink buffer size,
     // then the input data rate may be large enough to justify increasing the OS socket buffer size also.)
     int socketNum = subsession->rtpSource()->RTPgs()->socketNum();
     unsigned curBufferSize = getReceiveBufferSize(*g_openrtsp.env, socketNum);
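     // Hedged follow-up (not part of the original code): actually enlarge the OS
     // receive buffer when it is smaller than what a high-bitrate stream needs.
     // The 2000000-byte target is an assumption; tune it to the stream.
     unsigned const desiredReceiveBufferSize = 2000000;
     if (curBufferSize < desiredReceiveBufferSize) {
      increaseReceiveBufferTo(*g_openrtsp.env, socketNum, desiredReceiveBufferSize);
     }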
    }
   }
}
// Perform additional 'setup' on each subsession, before playing them:
setupStreams();
}
void setupStreams() 
{
MediaSubsession *subsession = NULL;
Boolean madeProgress = False;
Boolean streamUsingTCP = True;


if (g_openrtsp.setupIter == NULL) 
   g_openrtsp.setupIter = new MediaSubsessionIterator(*g_openrtsp.session);
while ((subsession = g_openrtsp.setupIter->next()) != NULL) {
   // We have another subsession left to set up:
   if (subsession->clientPortNum() == 0) continue; // port # was not set
   setupSubsession(subsession, streamUsingTCP, continueAfterSETUP);
   return;
}
// We're done setting up subsessions.
delete g_openrtsp.setupIter;
g_openrtsp.setupIter = NULL;


// Create output files:
   {
    // Create and start "FileSink"s for each subsession:
    madeProgress = False;
    MediaSubsessionIterator iter(*g_openrtsp.session);
    while ((subsession = iter.next()) != NULL) 
    {
     if (subsession->readSource() == NULL) 
      continue; // was not initiated
     // Create an output file for each desired stream:
     FileSink* fileSink;
     if (strcmp(subsession->mediumName(), "audio") == 0 &&
      (strcmp(subsession->codecName(), "AMR") == 0 ||
      strcmp(subsession->codecName(), "AMR-WB") == 0)) {
       assert(0 == 1);
       // For AMR audio streams, we use a special sink that inserts AMR frame hdrs:
       /*fileSink = AMRAudioFileSink::createNew(*env, outFileName,
       fileSinkBufferSize, oneFilePerFrame);*/
     } else if (strcmp(subsession->mediumName(), "video") == 0 &&
      (strcmp(subsession->codecName(), "H264") == 0)) {
       // For an H.264 video stream, we use a special sink that inserts start codes,
       // and prepend the SPS/PPS parameter sets taken from the SDP:
       //fileSink = H264VideoFileSink::createNew(*env, outFileName,
       //     fileSinkBufferSize, oneFilePerFrame);
       unsigned int num = 0;   
       SPropRecord* sps = parseSPropParameterSets(subsession->fmtp_spropparametersets(), num);   
       // NOTE: a bufferSize of 0 leaves the sink unable to buffer whole frames;
       // a real size (e.g. 100000) is probably needed here.
       fileSink = H264VideoFileSink::createNew(*g_openrtsp.env, "d:\\h264"/*outFileName*/,   //\\storage card\\h264\\SDMMC
        0, false);   
       struct timeval tv={0,0}; 
       unsigned char buf[4] = {0};
       unsigned int len = 0;
       unsigned char start_code[4] = {0x00, 0x00, 0x00, 0x01};   
        if (num > 0) { // write the SPS record, if the SDP carried one
         buf[0] = 27;
         fileSink->addData(buf,1,tv);
         memset(buf,0,4);
         fileSink->addData(buf,4,tv);
         len = sps[0].sPropLength + 4;
         memcpy(buf,&len,4);
         fileSink->addData(buf,4,tv);
         fileSink->addData(start_code, 4, tv);
         fileSink->addData(sps[0].sPropBytes,sps[0].sPropLength,tv);
        }
        if (num > 1) { // write the PPS record, if the SDP carried one
         buf[0] = 27;
         fileSink->addData(buf,1,tv);
         memset(buf,0,4);
         fileSink->addData(buf,4,tv);
         len = sps[1].sPropLength + 4;
         memcpy(buf,&len,4);
         fileSink->addData(buf,4,tv);
         fileSink->addData(start_code, 4, tv);
         fileSink->addData(sps[1].sPropBytes,sps[1].sPropLength,tv);
        }
        delete[] sps;   
     } else {
      // Normal case:
      fileSink = FileSink::createNew(*g_openrtsp.env, "d:\\vod"/*outFileName*/,//\\storage card\\video-H264-2\\SDMMC\\video-H264-2
       0, false);
     }
     subsession->sink = fileSink;
    
      if (strcmp(subsession->mediumName(), "video") == 0 &&
       strcmp(subsession->codecName(), "MP4V-ES") == 0 &&
       subsession->fmtp_config() != NULL) 
      {
        // For MPEG-4 video RTP streams, the 'config' information
        // from the SDP description contains useful VOL etc. headers.
        // Insert this data at the front of the output file:
        unsigned configLen;
        unsigned char* configData
         = parseGeneralConfigStr(subsession->fmtp_config(), configLen);
        struct timeval timeNow;
        gettimeofday(&timeNow, NULL);
        fileSink->addData(configData, configLen, timeNow);
        delete[] configData;
      }
      subsession->sink->startPlaying(*(subsession->readSource()),
       subsessionAfterPlaying,
       subsession);
      // Also set a handler to be called if a RTCP "BYE" arrives
      // for this subsession:
      if (subsession->rtcpInstance() != NULL) {
       subsession->rtcpInstance()->setByeHandler(subsessionByeHandler,
        subsession);
      }
      madeProgress = True;
     }
    }
  
// Finally, start playing each subsession, to start the data flow:
if (g_openrtsp.duration == 0) {
   if (g_openrtsp.scale > 0) g_openrtsp.duration = g_openrtsp.session->playEndTime() - g_openrtsp.initialSeekTime; // use SDP end time
   else if (g_openrtsp.scale < 0) g_openrtsp.duration = 0;
}
if (g_openrtsp.duration < 0) g_openrtsp.duration = 0.0;
g_openrtsp.endTime = g_openrtsp.initialSeekTime;
if (g_openrtsp.scale > 0) {
   if (g_openrtsp.duration <= 0) g_openrtsp.endTime = -1.0f;
   else g_openrtsp.endTime = g_openrtsp.initialSeekTime + g_openrtsp.duration;
} else {
   g_openrtsp.endTime = g_openrtsp.initialSeekTime - g_openrtsp.duration;
   if (g_openrtsp.endTime < 0) g_openrtsp.endTime = 0.0f;
}
startPlayingSession(g_openrtsp.session, g_openrtsp.initialSeekTime, g_openrtsp.endTime, g_openrtsp.scale, continueAfterPLAY);
}
void subsessionAfterPlaying(void* clientData) {
// Begin by closing this media subsession's stream:
MediaSubsession* subsession = (MediaSubsession*)clientData;
Medium::close(subsession->sink);
subsession->sink = NULL;
// Next, check whether *all* subsessions' streams have now been closed:
MediaSession& session = subsession->parentSession();
MediaSubsessionIterator iter(session);
while ((subsession = iter.next()) != NULL) {
   if (subsession->sink != NULL) return; // this subsession is still active
}
// All subsessions' streams have now been closed
sessionAfterPlaying();
}
void subsessionByeHandler(void* clientData) {
struct timeval timeNow;
gettimeofday(&timeNow, NULL);
//unsigned secsDiff = timeNow.tv_sec - startTime.tv_sec;
MediaSubsession* subsession = (MediaSubsession*)clientData;


// Act now as if the subsession had closed:
subsessionAfterPlaying(subsession);
}
void sessionAfterPlaying() {
  
   // All subsessions have ended; this version simply starts playing the
   // stream(s) over again (the original openRTSP only does this with -c):
   startPlayingSession(g_openrtsp.session, g_openrtsp.initialSeekTime, g_openrtsp.endTime, g_openrtsp.scale, continueAfterPLAY);
}
void StopRtsp()
{
// TerminateThread() kills the event-loop thread abruptly; asking the event loop
// to exit (see the watch-variable sketch near failFun()) and waiting on the
// thread handle is safer.
TerminateThread(g_openrtsp.hThread,0);


if (g_openrtsp.rtspClient != NULL && g_openrtsp.session) { 
   MediaSubsessionIterator iter(*(g_openrtsp.session)); 
   MediaSubsession* subsession; 
   while ((subsession = iter.next()) != NULL) { 
    Medium::close(subsession->sink); 
    subsession->sink = NULL; 
    g_openrtsp.rtspClient->sendTeardownCommand(*subsession,NULL,NULL); 
   } 
}
UsageEnvironment* env = NULL; 
TaskScheduler* scheduler = NULL; 
if (g_openrtsp.session != NULL) { 
   env = &(g_openrtsp.session->envir()); 
   scheduler = &(env->taskScheduler()); 
}
Medium::close(g_openrtsp.session); 
Medium::close(g_openrtsp.rtspClient); // close media objects via Medium::close(), not a bare delete
g_openrtsp.rtspClient = NULL;
g_openrtsp.session = NULL;
// reclaim() deletes the environment itself once no media objects still reference
// it, so it must not be deleted again afterwards:
if (g_openrtsp.env)
{
   g_openrtsp.env->reclaim();
   g_openrtsp.env = NULL;
}
if (g_openrtsp.scheduler)
{
   delete g_openrtsp.scheduler; 
   g_openrtsp.scheduler = NULL;
}


if (g_openrtsp.setupIter)
{
   delete g_openrtsp.setupIter;
   g_openrtsp.setupIter = NULL;
}


}
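// A gentler alternative shutdown (sketch; assumes the watch-variable scheme from
// the note near failFun() is in place):
//
//   g_eventLoopWatchVariable = 1;                  // ask doEventLoop() to return
//   WaitForSingleObject(g_openrtsp.hThread, 5000); // 5-second timeout is an assumption
//   CloseHandle(g_openrtsp.hThread);
//   g_openrtsp.hThread = NULL;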
void close_rtsp() 
{
teardownRTSPorSIPSession();
UsageEnvironment* env = NULL;
TaskScheduler* scheduler = NULL;
if (g_openrtsp.session != NULL) {
   env = &(g_openrtsp.session->envir());
   scheduler = &(env->taskScheduler());
}
Medium::close(g_openrtsp.session);
Medium::close(g_openrtsp.rtspClient);
g_openrtsp.session = NULL;
g_openrtsp.rtspClient = NULL;
if (env != NULL) env->reclaim(); // reclaim() deletes env once nothing references it
delete scheduler;
}
void teardownRTSPorSIPSession() 
{
MediaSession* mediaSession = g_openrtsp.session;
if (mediaSession == NULL) return;
if (g_openrtsp.rtspClient != NULL) {
   MediaSubsessionIterator iter(*mediaSession);
   MediaSubsession* subsession;
   while ((subsession = iter.next()) != NULL) {
    // teardownMediaSubsession() is the old synchronous live555 API; newer
    // libraries use sendTeardownCommand() with a response handler instead.
    g_openrtsp.rtspClient->teardownMediaSubsession(*subsession);
   }
}
}
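// continueAfterTEARDOWN is forward-declared above but never defined. A minimal
// sketch for the asynchronous live555 API could look like this (whether to tear
// down per-subsession or per-session is up to the caller):
void continueAfterTEARDOWN(RTSPClient*, int /*resultCode*/, char* resultString)
{
delete[] resultString; // the response handler owns resultString
failFun();             // leave the event loop once the TEARDOWN reply has arrived
}
// e.g.: g_openrtsp.rtspClient->sendTeardownCommand(*g_openrtsp.session, continueAfterTEARDOWN);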
Example: driving the code from an MFC test dialog; the entry points are declared as plain C functions:
extern "C" void Init();
extern "C" int ConnServer(char* strUrl);
extern "C" void UnInit();
void CRtspDownPCDlg::OnBnClickedOk()
{
// TODO: Add your control notification handler code here
//OnOK();
Init();
ConnServer("rtsp://119.4.250.56:80/rtplive/kbws.sdp");
}
void CRtspDownPCDlg::OnBnClickedButton1()
{
// TODO: Add your control notification handler code here
UnInit();
}

Reposted from http://hi.baidu.com/cr0_3/blog/item/7310afdd3b71595995ee3735.html
