补充Live555推实时流

lvrs.h //接口头文件

#ifndef _LVRS_H_
#define _LVRS_H_

/* Linkage helpers: give the API C linkage when this header is compiled as C++,
 * so the library can be called from plain C code. */
#ifdef __cplusplus
#define EXTERN            extern "C"
#define EXTERN_BEGIN      extern "C" {
#define EXTERN_END        }
#else
#define EXTERN            extern
#define EXTERN_BEGIN
#define EXTERN_END
#endif
EXTERN_BEGIN
/* Frame-fetch callback supplied by the application: copies one encoded frame
 * for stream (chId, srcId) into buf (at most `size` bytes) and returns the
 * number of bytes written. */
typedef int (*myGetFrameCB)(int chId,int srcId,char* buf,int size);
/* Starts the RTSP server and runs the LIVE555 event loop; does not return. */
EXTERN void* liveVideoServerStart(myGetFrameCB cb);
EXTERN_END
	
#endif

推流类 liveVideoRTSPServer.h

#ifndef _LIVE_VIDEO_RTSP_SERVER_H
#define _LIVE_VIDEO_RTSP_SERVER_H

#ifndef _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH
#include "RTSPServerSupportingHTTPStreaming.hh"
#endif
#include <liveMedia.hh>
// Declares GetFrameCB. The original header used the type without pulling in
// its declaration, relying on fragile include order in every .cpp.
#include "ByteFrameLiveVideoSource.hh"

// RTSP server that streams live H.264 video obtained through an
// application-supplied frame callback instead of on-disk files.
class liveVideoRTSPServer: public RTSPServerSupportingHTTPStreaming {
public:
  // Creates a server listening on ourPort; cb supplies encoded frames.
  // Returns NULL if the server socket cannot be set up.
  static liveVideoRTSPServer* createNew(Port ourPort,UserAuthenticationDatabase* authDatabase,
  	GetFrameCB cb,unsigned reclamationTestSeconds = 65);

protected:
  liveVideoRTSPServer(UsageEnvironment& env, int ourSocket, Port ourPort,
		    UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds);
  // called only by createNew();
  virtual ~liveVideoRTSPServer();

protected: // redefined virtual functions
  // Creates a ServerMediaSession on demand for stream names like "ch0/main".
  virtual ServerMediaSession* lookupServerMediaSession(char const* streamName);
private:
	// NOTE(review): keeps the original "Fream" typo because the .cpp refers
	// to this name; rename in both places together.
	GetFrameCB readFreamCb;
public:
	// Process-wide usage environment, created lazily by getEnv().
	static UsageEnvironment* s_env;
	static UsageEnvironment* getEnv();
};
#endif

liveVideoRTSPServer.cpp liveVideoRTSPServer实现

#include "liveVideoRTSPServer.h"
#include "BasicUsageEnvironment.hh"
#include "lvrs.h"
#include <string.h>
#include <iostream>
using namespace std;


UsageEnvironment* liveVideoRTSPServer::s_env = NULL;

// Lazily builds the process-wide BasicUsageEnvironment on first use and
// returns the same instance afterwards.
UsageEnvironment* liveVideoRTSPServer::getEnv(){
	if (s_env != NULL) return s_env;

	cout<< "create s_env"<<endl;
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	s_env = BasicUsageEnvironment::createNew(*scheduler);
	cout<< "create s_env OK!"<<endl;
	return s_env;
}

// Factory: sets up the listening socket, constructs the server, and wires in
// the frame-fetch callback. Returns NULL when the socket cannot be created.
liveVideoRTSPServer*
liveVideoRTSPServer::createNew(Port ourPort,
			     UserAuthenticationDatabase* authDatabase,GetFrameCB cb,
			     unsigned reclamationTestSeconds) {
  UsageEnvironment* env = getEnv();

  int serverSocket = setUpOurSocket(*env, ourPort);
  if (serverSocket == -1) return NULL;

  liveVideoRTSPServer* server = new liveVideoRTSPServer(
      *env, serverSocket, ourPort, authDatabase, reclamationTestSeconds);
  server->readFreamCb = cb;
  return server;
}

// Forwards everything to the RTSPServerSupportingHTTPStreaming base class;
// the frame callback member is filled in afterwards by createNew().
liveVideoRTSPServer::liveVideoRTSPServer(UsageEnvironment& env,
					 int ourSocket, Port ourPort,
					 UserAuthenticationDatabase* authDatabase,
					 unsigned reclamationTestSeconds)
  : RTSPServerSupportingHTTPStreaming(env, ourSocket, ourPort, authDatabase,
				      reclamationTestSeconds) {}


// Nothing owned directly by this class to release (readFreamCb is a plain
// function pointer; s_env is shared process-wide state).
liveVideoRTSPServer::~liveVideoRTSPServer(){
}

static ServerMediaSession* createNewSMS(UsageEnvironment& env,
					char const* streamName, GetFrameCB  cb); // forward

// Looks up — or lazily creates — the ServerMediaSession for "streamName".
// Unlike the file-based LIVE555 media server there is no on-disk file to
// check; sessions are created on demand and wired to the frame callback.
ServerMediaSession*
liveVideoRTSPServer::lookupServerMediaSession(char const* streamName) {
  cout<<"liveVideoRTSPServer::lookupServerMediaSession: "<<streamName<<endl;

  // Reuse an existing session if one was already created for this name.
  ServerMediaSession* sms = RTSPServer::lookupServerMediaSession(streamName);
  if (sms == NULL) {
    printf("create sms--------------------> \r\n");
    // First request for this stream: build a new session. createNewSMS()
    // returns NULL for unrecognized names; that NULL is passed through to
    // the caller exactly as in the original code.
    sms = createNewSMS(envir(), streamName, readFreamCb);
    addServerMediaSession(sms);
  }
  return sms;
}

// Special code for handling Matroska files:
// NOTE(review): this Matroska demux machinery is never referenced anywhere
// else in this file (it appears to be left over from the LIVE555 media-server
// sample this code was derived from) — confirm it can be removed.
static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
  demux = newDemux;
  newMatroskaDemuxWatchVariable = 1;
}
// END Special code for handling Matroska files:

// Builds a ServerMediaSession named after the local `streamName` into the
// local `sms` variable. `description` must be a string literal: it is pasted
// together with the ", streamed by..." suffix at compile time.
#define NEW_SMS(description) do {\
char const* descStr = description\
    ", streamed by the LIVE555 Media Server";\
sms = ServerMediaSession::createNew(env, streamName, streamName, descStr);\
} while(0)

// Parses a stream name of the form "<channel>/<source>" (e.g. "ch0/main")
// and builds a ServerMediaSession with one live H.264 subsession for it.
//   channel: "ch0" -> chId 0, "ch1" -> chId 1
//   source : "main" -> SrcId 0, "sub" -> SrcId 1
// Returns NULL for any name that does not match this scheme.
static ServerMediaSession* createNewSMS(UsageEnvironment& env,
					char const* streamName, GetFrameCB cb) {
  ServerMediaSession* sms = NULL;
  Boolean const reuseSource = False;

  // The last '/' separates the channel part from the source part.
  char const* extension = strrchr(streamName, '/');
  if (extension == NULL) return NULL;

  // Copy the channel part (everything before the first '/') into chStr.
  // Bounded by sizeof(chStr): the original loop could overflow the buffer
  // for long channel names.
  char chStr[10] = {0};
  size_t i;
  for (i = 0; i + 1 < sizeof(chStr) && streamName[i] != '\0'; i++) {
    if (streamName[i] == '/') break;
    chStr[i] = streamName[i];
  }
  chStr[i] = '\0';

  int chId;
  // BUGFIX: the original tested `strcmp(...)` truthiness — inverted, since
  // strcmp returns 0 on a match — and mapped "ch1" to channel 0 as well,
  // so every request ended up on channel 0.
  if (strcmp(chStr, "ch0") == 0) {
    chId = 0;
  } else if (strcmp(chStr, "ch1") == 0) {
    chId = 1;
  } else {
    return NULL;
  }

  int SrcId;
  if (strcmp(extension, "/main") == 0) {
    SrcId = 0;
  } else if (strcmp(extension, "/sub") == 0) {
    SrcId = 1;
  } else {
    return NULL;
  }

  cout<<"create H264LiveVideoServerMediaSubsession"<<endl;
  NEW_SMS("H.264 Live Video");
  // Allow for possibly large H.264 frames: one full 1080p YUV 4:2:0 picture.
  OutPacketBuffer::maxSize = 1920*1080*3/2;
  sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(env, cb, chId, SrcId, reuseSource));

  return sms;
}


// Public entry point: creates the RTSP server (port 554, falling back to
// 8554 when the privileged port is unavailable), prints the client URL
// prefix, and runs the LIVE555 event loop. Does not return on success;
// exits the process if no server can be created.
// `cb` is invoked by the streaming pipeline to fetch encoded H.264 frames.
void* liveVideoServerStart(myGetFrameCB cb){
	RTSPServer* rtspServer;
	portNumBits rtspServerPortNum = 554;

	rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL, (GetFrameCB)cb);
	if (rtspServer == NULL) {
		// Port 554 usually needs root privileges; retry on the
		// conventional unprivileged RTSP port.
		rtspServerPortNum = 8554;
		rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL, (GetFrameCB)cb);
	}
	if (rtspServer == NULL) {
		*liveVideoRTSPServer::getEnv() << "Failed to create RTSP server: " << liveVideoRTSPServer::getEnv()->getResultMsg() << "\n";
		exit(1);
	}

	// rtspURLPrefix() returns a heap-allocated string that the caller must
	// delete[]; the original code leaked it.
	char* urlPrefix = rtspServer->rtspURLPrefix();
	fprintf(stdout, "use like this:%s", urlPrefix);
	fprintf(stdout, "channel/srcch \n");
	delete[] urlPrefix;

	liveVideoRTSPServer::getEnv()->taskScheduler().doEventLoop(); // does not return
	return NULL;
}

H264LiveVideoServerMediaSubsession类实现

#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_

#include "ByteFrameLiveVideoSource.hh"

#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif

// On-demand subsession serving live H.264 video for one (channel, source)
// pair. Frames are pulled through the GetFrameCB callback rather than read
// from a file; SDP parameters are discovered by briefly playing the stream
// into a dummy sink (see getAuxSDPLine()).
class H264LiveVideoServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
	// Factory; cb supplies encoded frames for channel mchId / source msrcId.
	static H264LiveVideoServerMediaSubsession*
	createNew(UsageEnvironment& env,GetFrameCB cb, int mchId,int msrcId,Boolean reuseFirstSource);
	// Used to implement "getAuxSDPLine()":
	void checkForAuxSDPLine1();
	void afterPlayingDummy1();

private: // redefined virtual functions  
	// Creates the per-client source chain (live source + H.264 framer).
	virtual FramedSource* createNewStreamSource(unsigned clientSessionId,  
	          unsigned& estBitrate);

	// Creates the per-client H.264 RTP sink.
	virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,  
	                    unsigned char rtpPayloadTypeIfDynamic,  
	                    FramedSource* inputSource);

protected:
	H264LiveVideoServerMediaSubsession(UsageEnvironment& env,
	      GetFrameCB cb, int mchId,int msrcId, Boolean reuseFirstSource);
	// called only by createNew();
	virtual ~H264LiveVideoServerMediaSubsession();

	// Signals the temporary event loop in getAuxSDPLine() to stop.
	void setDoneFlag() { fDoneFlag = ~0; }

protected: // redefined virtual functions
	virtual char const* getAuxSDPLine(RTPSink* rtpSink,
	    FramedSource* inputSource);
	
protected:
	//virtual char const* sdpLines(); 
	GetFrameCB tempCb;  // frame-fetch callback passed on to the live source
	int chId; //0-1
	int srcId;//0 main,1 sub

	char* fAuxSDPLine;  // cached SDP line, strDup()'d in checkForAuxSDPLine1()
	char fDoneFlag; // used when setting up "fAuxSDPLine"
	RTPSink* fDummyRTPSink; // ditto
};
#endif

#include "H264LiveVideoServerMediaSubsession.hh"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include <iostream>
using namespace std;
static void afterPlayingDummy(void* clientData);
static void checkForAuxSDPLine(void* clientData);

// Factory wrapper around the protected constructor.
H264LiveVideoServerMediaSubsession* H264LiveVideoServerMediaSubsession::createNew(
	UsageEnvironment& env, GetFrameCB cb, int mchId, int msrcId, Boolean reuseFirstSource) {
	return new H264LiveVideoServerMediaSubsession(env, cb, mchId, msrcId, reuseFirstSource);
}

// Builds the per-client source chain: a callback-driven live source wrapped
// in an H264VideoStreamFramer, which parses the byte stream into NAL units.
FramedSource* H264LiveVideoServerMediaSubsession::createNewStreamSource(
		unsigned clientSessionId, unsigned& estBitrate) {
	estBitrate = 500; // kbps, estimate

	if (tempCb != NULL) cout<<"create new stream source------------------>"<<endl; 
	printf("createNewStreamSource--------------====> \r\n");
	ByteFrameLiveVideoSource* source =
		ByteFrameLiveVideoSource::createNew(envir(), tempCb, chId, srcId);
	return H264VideoStreamFramer::createNew(envir(), source);
}
// Every client session gets its own H.264 RTP sink; the input source is not
// needed to construct it.
RTPSink* H264LiveVideoServerMediaSubsession::createNewRTPSink(
		Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
		FramedSource* inputSource) {
	cout<<"H264LiveVideoServerMediaSubsession :: createNewRTPSink --------------->" <<endl;
	return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
// Stores the channel/source identity and the frame callback. SDP-related
// state starts out empty and is filled in lazily by getAuxSDPLine().
H264LiveVideoServerMediaSubsession::H264LiveVideoServerMediaSubsession(
	UsageEnvironment& env, GetFrameCB cb, int mchId, int msrcId, Boolean reuseFirstSource)
	: OnDemandServerMediaSubsession(env, reuseFirstSource),
	  tempCb(cb),
	  chId(mchId),
	  srcId(msrcId),
	  fAuxSDPLine(NULL),
	  fDoneFlag(0),
	  fDummyRTPSink(NULL) {}

H264LiveVideoServerMediaSubsession ::~H264LiveVideoServerMediaSubsession(){
	// fAuxSDPLine is allocated with strDup() (new char[]) in
	// checkForAuxSDPLine1(); the original destructor leaked it.
	delete[] fAuxSDPLine;
}
// Returns the media-level SDP attributes for this subsession. For H.264 the
// "profile-level-id" / "sprop-parameter-sets" values are only known after the
// framer has seen SPS/PPS NAL units, so we briefly start a dummy playback
// into `rtpSink` and spin a nested event loop until the sink can produce the
// line (or until afterPlayingDummy() gives up).
char const* H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
    // until we start reading the file.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
    // and we need to start reading data from our file until this changes.
    cout<<"getAuxSDPLine: "<<endl;
    fDummyRTPSink = rtpSink;
	
    // Start streaming into the sink purely to make it compute the SDP line.
    // (Original author's note: "this have probrem!!!" — behavior preserved.)
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
	cout<<"getAuxSDPLine: ok ? "<<endl;
  }

  // Run the event loop until setDoneFlag() is called by one of the handlers.
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}

static void afterPlayingDummy(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}

// The dummy playback has finished: cancel any pending SDP poll and release
// the nested event loop spun up by getAuxSDPLine().
void H264LiveVideoServerMediaSubsession::afterPlayingDummy1() {
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}
// Polls until the dummy RTP sink can produce its aux SDP line (H.264
// parameter sets), caches it, then stops the temporary event loop.
void H264LiveVideoServerMediaSubsession::checkForAuxSDPLine1() {
  if (fAuxSDPLine != NULL) {
    // Already cached (set by a concurrent setup): we're done.
    setDoneFlag();
    return;
  }

  char const* line = (fDummyRTPSink != NULL) ? fDummyRTPSink->auxSDPLine() : NULL;
  if (line != NULL) {
    // The sink has seen SPS/PPS: keep our own copy and drop the dummy sink.
    fAuxSDPLine = strDup(line);
    fDummyRTPSink = NULL;
    setDoneFlag();
    return;
  }

  // Not ready yet: poll again after a brief delay.
  int const delayUSecs = 100000; // 100 ms
  nextTask() = envir().taskScheduler().scheduleDelayedTask(
      delayUSecs, (TaskFunc*)checkForAuxSDPLine, this);
}


ByteFrameLiveVideoSource实现

#ifndef _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_
#define _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

// Frame-fetch callback: copies one encoded frame for (chId, srcId) into buf
// (at most `size` bytes) and returns the number of bytes written.
// Note the buffer type differs from myGetFrameCB in lvrs.h (unsigned char*
// vs char*); lvrs.cpp bridges the two with a cast.
typedef int (*GetFrameCB)(int chId,int srcId,unsigned char* buf,int size);

// FramedSource that delivers live encoded frames obtained through a
// GetFrameCB callback instead of reading from a file.
class ByteFrameLiveVideoSource: public FramedSource{
public:
 // Factory; funcCb supplies frames for channel chId / source srcId.
 static ByteFrameLiveVideoSource* createNew(UsageEnvironment& env,
 					 GetFrameCB funcCb,int chId=0,int srcId =0,
					 unsigned preferredFrameSize = 0,
					 unsigned playTimePerFrame = 0);

  //void seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0);
    // if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOF
 

protected:
  ByteFrameLiveVideoSource(UsageEnvironment& env,
  			   int mchId,int msrcId,
		       unsigned preferredFrameSize,
		       unsigned playTimePerFrame);
	// called only by createNew()

  virtual ~ByteFrameLiveVideoSource();
	
  // Background-read handler used in the asynchronous (non-synchronous) mode.
  static void getFrameableHandler(ByteFrameLiveVideoSource* source, int mask);
  
 // Fetches one frame via the callback and delivers it downstream.
 void doGetNextFrameFormEncoder();
 
private:
  // redefined virtual functions:
  virtual void doGetNextFrame();
  virtual void doStopGettingFrames();
  GetFrameCB getFrame;  // frame-fetch callback set by createNew()
  
private:
  int chId;   // channel id (0-1)
  int srcId;  // 0 main stream, 1 sub stream
  unsigned fPreferredFrameSize;
  unsigned fPlayTimePerFrame;
  Boolean fFidIsSeekable;  // NOTE(review): never used in the visible code
  unsigned fLastPlayTime;
  Boolean fHaveStartedReading;
  Boolean fLimitNumBytesToStream;
  u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True
};
#endif

#include "ByteFrameLiveVideoSource.hh"
#include "GroupsockHelper.hh"
#include <iostream>
using namespace std;

#define GETFRAME_HANDLER_ID 5006
#define READ_FROM_FILES_SYNCHRONOUSLY

// Factory: constructs the source and attaches the frame-fetch callback.
ByteFrameLiveVideoSource*
ByteFrameLiveVideoSource::createNew(UsageEnvironment& env,
				GetFrameCB funcCb,int chId,int srcId,
				unsigned preferredFrameSize,
				unsigned playTimePerFrame) {
	ByteFrameLiveVideoSource* src = new ByteFrameLiveVideoSource(
		env, chId, srcId, preferredFrameSize, playTimePerFrame);
	src->getFrame = funcCb;
	return src;
}
// Stores stream identity and pacing parameters. No frame is fetched here;
// reading starts when the downstream framer calls doGetNextFrame().
ByteFrameLiveVideoSource::ByteFrameLiveVideoSource(UsageEnvironment& env,
			 int mchId,int msrcId,
			 unsigned preferredFrameSize,
			 unsigned playTimePerFrame)
	: FramedSource(env),chId(mchId),srcId(msrcId),fPreferredFrameSize(preferredFrameSize),
	  fPlayTimePerFrame(playTimePerFrame), fLastPlayTime(0),
	  fHaveStartedReading(False), fLimitNumBytesToStream(False), fNumBytesToStream(0) {
	  // NOTE(review): fMaxSize is normally set by the framework to reflect the
	  // downstream buffer before each read; pre-setting it to a full 1080p
	  // YUV 4:2:0 frame here looks like a workaround — confirm it is needed.
	  fMaxSize = 1920*1080*3/2;
  }
// Nothing owned by this class to release; the callback pointer is not owned.
ByteFrameLiveVideoSource::~ByteFrameLiveVideoSource(){
	
}
// Background-read handler (async mode): invoked by the scheduler when data
// may be fetched for `source`.
void ByteFrameLiveVideoSource::getFrameableHandler(ByteFrameLiveVideoSource* source, int /*mask*/) {
  if (source->isCurrentlyAwaitingData()) {
    source->doGetNextFrameFormEncoder();
  } else {
    source->doStopGettingFrames(); // downstream is not ready for the data yet
  }
}
// Fetches one encoded frame from the application via the registered callback
// and hands it to the downstream reader, stamping a presentation time.
void ByteFrameLiveVideoSource:: doGetNextFrameFormEncoder(){
	// Clamp the read size when a byte budget or preferred frame size applies.
	if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
		fMaxSize = (unsigned)fNumBytesToStream;
	}
	if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
		fMaxSize = fPreferredFrameSize;
	}

	fFrameSize = 0;
	if (getFrame != NULL) {
		// The callback fills fTo with at most fMaxSize bytes of encoded data
		// and returns the number of bytes written.
		fFrameSize = getFrame(chId, srcId, fTo, fMaxSize);
	}

	if (fFrameSize == 0) {
		// No data: treat as end of stream.
		handleClosure(this);
		return;
	}
	// BUGFIX: only charge the byte budget when one is actually in force; the
	// original decremented unconditionally, underflowing the unsigned counter
	// whenever fLimitNumBytesToStream was False.
	if (fLimitNumBytesToStream) fNumBytesToStream -= fFrameSize;

	// Set the 'presentation time':
	if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
		if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
			// This is the first frame, so use the current time:
			gettimeofday(&fPresentationTime, NULL);
		} else {
			// Increment by the play time of the previous data:
			unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
			fPresentationTime.tv_sec += uSeconds/1000000;
			fPresentationTime.tv_usec = uSeconds%1000000;
		}

		// Remember the play time of this data:
		fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
		fDurationInMicroseconds = fLastPlayTime;
	} else {
		// We don't know a specific play time duration for this data,
		// so just record the current time as being the 'presentation time':
		gettimeofday(&fPresentationTime, NULL);
	}

	// Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
	// To avoid possible infinite recursion, we need to return to the event loop to do this.
	// NOTE(review): GETFRAME_HANDLER_ID (5006) is being used here as the delay
	// in MICROseconds, not as a handler id — confirm this pacing is intended.
	nextTask() = envir().taskScheduler().scheduleDelayedTask(GETFRAME_HANDLER_ID,
	(TaskFunc*)FramedSource::afterGetting, this);
#else
	// Because the read was done from the event loop, we can call the
	// 'after getting' function directly, without risk of infinite recursion:
	FramedSource::afterGetting(this);
#endif
}

// Entry point called by the downstream framer whenever it wants more data.
void ByteFrameLiveVideoSource:: doGetNextFrame()
{
	// A zero remaining byte budget means end of stream.
	if (fLimitNumBytesToStream && fNumBytesToStream == 0) {
		handleClosure(this);
		return;
	}

#ifdef READ_FROM_FILES_SYNCHRONOUSLY
	// Synchronous mode: fetch a frame right now.
	doGetNextFrameFormEncoder();
#else
	// Async mode: register the background handler once; the scheduler will
	// invoke it whenever data can be read.
	if (fHaveStartedReading) return;
	envir().taskScheduler().turnOnBackgroundReadHandling(GETFRAME_HANDLER_ID,
		(TaskScheduler::BackgroundHandlerProc*)&getFrameableHandler, this);
	fHaveStartedReading = True;
#endif
}

// Called when the downstream reader no longer wants data: cancel the pending
// delayed task and, in async mode, turn off the background read handler.
void ByteFrameLiveVideoSource:: doStopGettingFrames() {
	  envir().taskScheduler().unscheduleDelayedTask(nextTask());
#ifndef READ_FROM_FILES_SYNCHRONOUSLY
envir().taskScheduler().turnOffBackgroundReadHandling(GETFRAME_HANDLER_ID);
	  fHaveStartedReading = False;
#endif
}


测试

#include "lvrs.h"
#include <stdio.h>

// Demo frame-fetch callback. A real application would copy an encoded H.264
// frame into `buf` here. This stub writes zero bytes so the streamed data is
// at least defined, and never reports more data than the buffer can hold —
// the original always returned 1024 (even when `size` was smaller) and never
// wrote `buf`, streaming uninitialized memory.
// Returns the number of bytes written (0 would signal end-of-stream).
int readFrame(int chId,int srcId,char* buf,int size){
	printf("get frame fafaslfjljslfs------------ \n"); 
	int n = size < 1024 ? size : 1024;
	for (int i = 0; i < n; i++) buf[i] = 0;
	return n;
}
// Demo entry point: start the RTSP server with the stub callback and run the
// LIVE555 event loop (liveVideoServerStart() does not return).
// `int main` replaces the original non-standard `void main`.
int main(void){
	liveVideoServerStart(readFrame);
	return 0;
}
  • 0
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

john_liqinghan

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值