live555直播(准备2)-重写doGetNextFrame()和doEventLoop()

<pre name="code" class="cpp">#include "live555/liveMedia.hh"
#include "live555/BasicUsageEnvironment.hh"
#include "live555/FramedSource.hh"
#include "live555/H264VideoFileServerMediaSubsession.hh"
#include "live555/H264VideoStreamFramer.hh"

///H264FramedLiveSource.hh///
//创建类,继承自FramedSource
class H264FramedLiveSource : public FramedSource
{
public:
	static H264FramedLiveSource* createNew(UsageEnvironment& env,
		char const* fileName,
		unsigned preferredFrameSize = 0,
		unsigned playTimePerFrame = 0); 

protected:
	H264FramedLiveSource(UsageEnvironment& env,
		char const* fileName, 
		unsigned preferredFrameSize,
		unsigned playTimePerFrame);
	// called only by createNew()
	~H264FramedLiveSource();

private:
	// redefined virtual functions:
	virtual void doGetNextFrame();
	int TransportData( unsigned char* to, unsigned maxSize );

protected:
    FILE *fp;
};

///H264FramedLiveSource.cpp///
H264FramedLiveSource::H264FramedLiveSource( UsageEnvironment& env,  
	char const* fileName, 
	unsigned preferredFrameSize, 
	unsigned playTimePerFrame )
	: FramedSource(env)
{
     fp = fopen( fileName, "rb" ); 
}

H264FramedLiveSource* H264FramedLiveSource::createNew( UsageEnvironment& env,
	                                       char const* fileName, 
	                                       unsigned preferredFrameSize /*= 0*/, 
	                                       unsigned playTimePerFrame /*= 0*/ )
{ 
	H264FramedLiveSource* newSource = new H264FramedLiveSource(env, fileName, preferredFrameSize, playTimePerFrame);
 
	return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
	fclose(fp);
}


long filesize(FILE *stream)
{
	long curpos, length;
	curpos = ftell(stream);
	fseek(stream, 0L, SEEK_END);
	length = ftell(stream);
	fseek(stream, curpos, SEEK_SET);
	return length;
}
//会话中获取一个包的时候自动调用以下 ,因为在继承类中也有此方法,
//这里运用了C++的覆盖,形成多态,看调用哪个类来判断调用哪个doGetNextFrame()方法
//因为main中调用的是H264LiveVideoServerMediaSubssion自己建的类,所以调用的是派生类中重写的方法
void H264FramedLiveSource::doGetNextFrame()
{

	if( filesize(fp) >  fMaxSize)
	  fFrameSize = fread(fTo,1,fMaxSize,fp); 
	else
	{
		fFrameSize = fread(fTo,1,filesize(fp),fp);
		fseek(fp, 0, SEEK_SET);
	}
	//fFrameSize = fMaxSize;
	nextTask() = envir().taskScheduler().scheduleDelayedTask( 0,
		(TaskFunc*)FramedSource::afterGetting, this);//表示延迟0秒后再执行 afterGetting 函数
	return;
}

H264LiveVideoServerMediaSubssion.hh
class H264LiveVideoServerMediaSubssion: public H264VideoFileServerMediaSubsession {

public:
  static H264LiveVideoServerMediaSubssion*
  createNew( UsageEnvironment& env,
	           char const* fileName, 
				Boolean reuseFirstSource );

protected: // we're a virtual base class
  H264LiveVideoServerMediaSubssion( UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource );
   ~H264LiveVideoServerMediaSubssion();

protected: // redefined virtual functions
   FramedSource* createNewStreamSource(unsigned clientSessionId,
					      unsigned& estBitrate);
public: 
   char fFileName[100];

};

H264LiveVideoServerMediaSubssion.cpp
H264LiveVideoServerMediaSubssion*
H264LiveVideoServerMediaSubssion::createNew( UsageEnvironment& env,
	                                         char const* fileName, 
											 Boolean reuseFirstSource )
{
	return new H264LiveVideoServerMediaSubssion( env, fileName, reuseFirstSource );
}

H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion( UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource )
: H264VideoFileServerMediaSubsession( env, fileName, reuseFirstSource )
{
	strcpy(fFileName,fileName);
}


H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}
//创建新会话时候自动调用以下 ,因为在继承类中也有此方法,
//这里运用了C++的覆盖,形成多态,看调用哪个类来判断调用哪个createNewStreamSource方法
//因为main中调用的是H264LiveVideoServerMediaSubssion自己建的类,所以调用的是派生类中重写的方法 
FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource( unsigned clientSessionId, unsigned& estBitrate )
{
	/* Remain to do : assign estBitrate */
	estBitrate = 1000; // kbps, estimate

	// Create the video source:
	H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), fFileName);
	if (liveSource == NULL)
	{
		return NULL;
	}

	// Create a framer for the Video Elementary Stream:
	return H264VideoStreamFramer::createNew(envir(), liveSource);
}
add by zjk/
//重写doEventLoop()函数,因为如果想在作为服务器的同时再执行点其他的命令,需要在此函数中添加
//因为此函数运行的时候就一直在那里循环了,其他命令必须在此函数中运行才行
class zjk
{
	public:  
	 zjk();
	void doEventLoopzjk(BasicTaskScheduler0* Basicscheduler);
};
void zjk::doEventLoopzjk(BasicTaskScheduler0* Basicscheduler) 
{ 	 // Repeatedly loop, handling readble sockets and timed events:  
   while (1) {
			//printf("zjk\n");
			Basicscheduler->SingleStep();	
			//ADD Sth else		
	 }
}

//H264unicast.cpp/
//创建交互环境,用来打印相关信息的
UsageEnvironment* env;

// To make the second and subsequent client for each stream reuse the same
// input stream as the first client (rather than playing the file from the
// start for each client), change the following "False" to "True":
Boolean reuseFirstSource = False;

// To stream *only* MPEG-1 or 2 video "I" frames
// (e.g., to reduce network bandwidth),
// change the following "False" to "True":
Boolean iFramesOnly = False;

//打印相关信息的函数
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
			   char const* streamName, char const* inputFileName) {
  char* url = rtspServer->rtspURL(sms);
  UsageEnvironment& env = rtspServer->envir();
  env << "\n\"" << streamName << "\" stream, from the file \""
      << inputFileName << "\"\n";
  env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  // 1.创建任务调度器,createNew其实就是创建类的实例
  //TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  BasicTaskScheduler0* Basicscheduler = BasicTaskScheduler::createNew();
  //想用BasicTaskScheduler0类中的SingleStep()函数,所以上面语句用BasicTaskScheduler0
  //但是下面中用的全是TaskScheduler,所以类型转换一下
  //注意:这里不能创建两个BasicTaskScheduler::createNew(),因为SingleStep()和TaskScheduler需要一个实例的
  TaskScheduler* scheduler;
  scheduler = Basicscheduler;
// 2. 创建交互环境
  env = BasicUsageEnvironment::createNew(*scheduler);
  //以下为权限控制的代码,设置后没有权限的客户端无法进行连接
  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // 3. Create the RTSP server:此时就一直处于监听模客户端的连接
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  char const* descriptionString
    = "Session streamed by \"testOnDemandRTSPServer\"";

  // Set up each of the possible streams that can be served by the
  // RTSP server.  Each such stream is implemented using a
  // "ServerMediaSession" object, plus one or more
  // "ServerMediaSubsession" objects for each audio/video substream.

  // A H.264 video elementary stream:
  {
    char const* streamName = "H264unicast";//流名字,媒体名
    char const* inputFileName = "test.264";//文件名,当客户端输入的流名字为h264ESVideoTest时,实际上打开的是test.264文件
    // 4.创建媒体会话
    //当客户点播时,要输入流名字streamName,告诉RTSP服务器点播的是哪个流。
    //流名字和文件名的对应关系是通过增加子会话建立起来的(流名字streamName不是文件名inputFileName)。媒体会话对会话描述、会话持续时间、流名字等与会话有关的信息进行管理
    //第二个参数:媒体名、三:媒体信息、四:媒体描述
    ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, streamName, streamName,
				      descriptionString);
    //5.添加264子会话 这里的文件名才是真正打开文件的名字
    //reuseFirstSource:
    //这里的H264VideoFileS...类派生自FileServerMediaSubsession派生自OnDemandServerMediaSubsession
    //而OnDemandServerMediaSubsession和PassiveMediaSubsession共同派生自ServerMediaSubsession
    //关于读取文件之类都在这个类中实现的,如果要将点播改为直播就是要新建类继承此类然后添加新的方法
H264LiveVideoServerMediaSubssion*a=H264LiveVideoServerMediaSubssion   
		       ::createNew(*env, inputFileName, reuseFirstSource);
	sms->addSubsession(a);
     //6.为rtspserver添加session
    rtspServer->addServerMediaSession(sms);
     //打印信息到标准输出
    announceStream(rtspServer, sms, streamName, inputFileName);
  }

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).

 // if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
 //   *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
 // } else {
  //  *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  //}
  //执行循环方法,来执行循环方法,对套接字的读取事件和对媒体文件的延时发送操作都在这个循环中完成。
  //env->taskScheduler().doEventLoop(); // does not return  跟这条语句相同的意思scheduler->doEventLoop(); 
	zjk *z;
	z->doEventLoopzjk(Basicscheduler);
  return 0; // only to prevent compiler warning
}
<pre name="code" class="cpp">//参考ByteStreamFileSource和H264VideoStreamFramer讲解:http://blog.csdn.net/zhangjikuan/article/details/38554589

 

                
  • 0
    点赞
  • 11
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
非常抱歉,之前的回答中的代码仍然有错误。以下是经过修正的代码,已经将 ServerMediaSession 的名称修改为 "stream",并将 RTSP URL 改为 `rtsp://127.0.0.1:8554/stream`: ```cpp #include <liveMedia/liveMedia.hh> #include <BasicUsageEnvironment/BasicUsageEnvironment.hh> #include <Groupsock/GroupsockHelper.hh> class VideoSource : public FramedSource { public: static VideoSource* createNew(UsageEnvironment& env) { return new VideoSource(env); } void setVideoData(char* data, unsigned int length) { fFrameSize = length; memcpy(fTo, data, length); fFrameCount++; if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { gettimeofday(&fPresentationTime, NULL); } } private: VideoSource(UsageEnvironment& env) : FramedSource(env), fFrameSize(0), fFrameCount(0) { gettimeofday(&fPresentationTime, NULL); } virtual ~VideoSource() {} virtual void doGetNextFrame() { if (fFrameSize > 0) { // 创建一个新的帧 if (fFrameCount > 0) { // 计算下一帧的时间戳 fPresentationTime.tv_usec += 33333; // 每帧33毫秒(30帧每秒) if (fPresentationTime.tv_usec >= 1000000) { fPresentationTime.tv_sec++; fPresentationTime.tv_usec -= 1000000; } } // 发送帧数据给客户端 FramedSource::afterGetting(this); } else { // 数据还未准备好,等待100毫秒后再次尝试获取帧数据 envir().taskScheduler().scheduleDelayedTask(100 * 1000, (TaskFunc*)doGetNextFrame, this); } } virtual void doStopGettingFrames() {} private: unsigned int fFrameSize; unsigned int fFrameCount; }; int main(int argc, char** argv) { // 创建一个 RTSP 服务器环境 TaskScheduler* scheduler = BasicTaskScheduler::createNew(); UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554); // 创建一个 RTSP 流媒体服务器 ServerMediaSession* sms = ServerMediaSession::createNew(*env, "stream", "Live stream"); // 添加视频源 VideoSource* videoSource = VideoSource::createNew(*env); H264VideoStreamFramer* videoStreamFramer = H264VideoStreamFramer::createNew(*env, videoSource); sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoStreamFramer)); rtspServer->addServerMediaSession(sms); // 启动 RTSP 服务器 env->taskScheduler().doEventLoop(); // 清理资源 Medium::close(rtspServer); env->reclaim(); delete scheduler; return 0; } ``` 非常抱歉之前的错误带来的困扰,感谢您的耐心和理解。如果您还有其他问题,请随时提问。
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值