live555 Study Notes: FramedSource in Detail

An overview of FramedSource

FramedSource is an abstract class: it derives from MediaSource, which in turn derives from Medium, and it declares the pure virtual function virtual void doGetNextFrame() = 0;

Its main job is to obtain data, for example from a file. It only fetches the raw bytes; they are then handed to H264or5VideoStreamParser, which parses them into NAL units.

Classes that read data from files basically all derive from it, directly or indirectly; for example ByteStreamFileSource, the source that H264VideoFileServerMediaSubsession ultimately creates.
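To make the contract concrete, here is a minimal sketch of a custom subclass. The class name MyFramedSource and its empty delivery logic are illustrative assumptions rather than live555 code; all a subclass really has to do is fill fTo with at most fMaxSize bytes, set fFrameSize and fPresentationTime, and then report completion through FramedSource::afterGetting():

#include <sys/time.h>
#include "FramedSource.hh"

// Hypothetical minimal subclass, for illustration only.
class MyFramedSource: public FramedSource {
public:
  static MyFramedSource* createNew(UsageEnvironment& env) {
    return new MyFramedSource(env);
  }

protected:
  MyFramedSource(UsageEnvironment& env): FramedSource(env) {}

private:
  virtual void doGetNextFrame() {
    // Copy at most fMaxSize bytes of new data into fTo here, then record
    // how much was actually delivered and when it should be presented:
    fFrameSize = 0;                   // bytes actually written to fTo
    fNumTruncatedBytes = 0;           // bytes dropped because they did not fit
    gettimeofday(&fPresentationTime, NULL);
    // Finally, hand the completed frame to whoever called getNextFrame():
    FramedSource::afterGetting(this);
  }
};

live555 itself ships a commented template for exactly this pattern in liveMedia/DeviceSource.cpp, which is worth reading alongside this note.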

Main call flow: fetching the next chunk of data with getNextFrame:

void FramedSource::getNextFrame(unsigned char* to, unsigned maxSize,
				afterGettingFunc* afterGettingFunc,
				void* afterGettingClientData,
				onCloseFunc* onCloseFunc,
				void* onCloseClientData) {
  // Make sure we're not already being read:
  if (fIsCurrentlyAwaitingData) {
    envir() << "FramedSource[" << this << "]::getNextFrame(): attempting to read more than once at the same time!\n";
    envir().internalError();
  }

  fTo = to; // destination buffer that will receive the data
  fMaxSize = maxSize; // maximum amount of data allowed, i.e. the size of the buffer that fTo points to
  fNumTruncatedBytes = 0; // by default; could be changed by doGetNextFrame()
                          // (when more data was available than fMaxSize allows, this records how many bytes were dropped)
  fDurationInMicroseconds = 0; // by default; could be changed by doGetNextFrame()
  fAfterGettingFunc = afterGettingFunc;
  fAfterGettingClientData = afterGettingClientData;
  fOnCloseFunc = onCloseFunc;
  fOnCloseClientData = onCloseClientData;
  fIsCurrentlyAwaitingData = True;

  doGetNextFrame();
}
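
Before looking at doGetNextFrame() implementations, it helps to see the other side of this API. A downstream reader (a sink or a parser) calls getNextFrame() with a static callback and, inside that callback, consumes the data and asks for the next frame. The sketch below is an illustration under assumptions: MyConsumer, its buffer, and its member names are invented; only the callback signatures follow the afterGettingFunc and onCloseFunc typedefs declared in FramedSource.hh.

#include <sys/time.h>
#include "FramedSource.hh"

// Hypothetical consumer, for illustration only.
class MyConsumer {
public:
  MyConsumer(FramedSource* source): fSource(source) {}

  void continueReading() {
    fSource->getNextFrame(fBuffer, sizeof fBuffer,
                          afterGettingFrame, this, // data callback + clientData
                          sourceClosed, this);     // closure callback + clientData
  }

private:
  // Must match FramedSource::afterGettingFunc:
  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
                                struct timeval presentationTime,
                                unsigned durationInMicroseconds) {
    MyConsumer* c = (MyConsumer*)clientData;
    // ... consume the frameSize bytes now sitting in c->fBuffer ...
    c->continueReading();              // request the next frame
  }

  // Must match FramedSource::onCloseFunc:
  static void sourceClosed(void* clientData) {
    // the source reported EOF or an error; stop reading here
  }

  FramedSource* fSource;
  unsigned char fBuffer[150000];
};

This request/callback loop is essentially what concrete MediaSink subclasses and the various stream framers do internally.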

As this shows, the function initializes a few required members and then calls the pure virtual doGetNextFrame(), whose implementation is left to derived classes. ByteStreamFileSource, for example, implements it as follows:

void ByteStreamFileSource::doGetNextFrame() {
  if (feof(fFid) || ferror(fFid) || (fLimitNumBytesToStream && fNumBytesToStream == 0)) {
    handleClosure();
    return;
  }

#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  doReadFromFile();
#else
  if (!fHaveStartedReading) {
    // Await readable data from the file:
    envir().taskScheduler().turnOnBackgroundReadHandling(fileno(fFid),
	       (TaskScheduler::BackgroundHandlerProc*)&fileReadableHandler, this);
    fHaveStartedReading = True;
  }
#endif
}
void ByteStreamFileSource::doReadFromFile() {
  // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
  if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
    fMaxSize = (unsigned)fNumBytesToStream;
  }
  if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  fFrameSize = fread(fTo, 1, fMaxSize, fFid);
#else
  if (fFidIsSeekable) {
    fFrameSize = fread(fTo, 1, fMaxSize, fFid);
  } else {
    // For non-seekable files (e.g., pipes), call "read()" rather than "fread()", to ensure that the read doesn't block:
    fFrameSize = read(fileno(fFid), fTo, fMaxSize);
  }
#endif
  if (fFrameSize == 0) {
    handleClosure();
    return;
  }
  fNumBytesToStream -= fFrameSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds	= fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }

    // Remember the play time of this data:
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  // To avoid possible infinite recursion, we need to return to the event loop to do this:
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
				(TaskFunc*)FramedSource::afterGetting, this);
#else
  // Because the file read was done from the event loop, we can call the
  // 'after getting' function directly, without risk of infinite recursion:
  FramedSource::afterGetting(this);
#endif
}
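
A note on the asynchronous (#else) branch: the event loop calls the fileReadableHandler that doGetNextFrame() registered via turnOnBackgroundReadHandling(). In live555 this handler is essentially a thin static wrapper that checks whether the reader is still waiting for data and then calls doReadFromFile(); the sketch below approximates it rather than quoting it verbatim:

void ByteStreamFileSource::fileReadableHandler(ByteStreamFileSource* source, int /*mask*/) {
  if (!source->isCurrentlyAwaitingData()) {
    // No one has called getNextFrame() since the last delivery, so don't read yet:
    source->doStopGettingFrames();
    return;
  }
  source->doReadFromFile();
}

Also worth noting is the presentation-time logic in doReadFromFile(): when fPreferredFrameSize and fPlayTimePerFrame are both non-zero (they come from the ByteStreamFileSource::createNew() parameters), each read records fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize and the next read advances fPresentationTime by that amount. For example, with fPlayTimePerFrame = 40000 microseconds, fPreferredFrameSize = 1000 bytes and a 500-byte read, the following frame's timestamp moves forward by 20000 microseconds; otherwise the current wall-clock time from gettimeofday() is used.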

In doReadFromFile() above, the values filled in during the read are passed onward mainly through the call nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); (in the asynchronous build, where the read was already driven from the event loop, FramedSource::afterGetting(this) is simply called directly).

Following FramedSource::afterGetting back into FramedSource, we find that afterGetting is a static member function, which is why it can be called this way. It is defined in FramedSource as follows:

void FramedSource::afterGetting(FramedSource* source) {
  source->fIsCurrentlyAwaitingData = False;
      // indicates that we can be read again
      // Note that this needs to be done here, in case the "fAfterFunc"
      // called below tries to read another frame (which it usually will)

  if (source->fAfterGettingFunc != NULL) {
    (*(source->fAfterGettingFunc))(source->fAfterGettingClientData,
				   source->fFrameSize, source->fNumTruncatedBytes,
				   source->fPresentationTime,
				   source->fDurationInMicroseconds);
  }
}

As this shows, afterGetting simply packages up the values that were initialized in getNextFrame and updated by doGetNextFrame() (fFrameSize, fNumTruncatedBytes, fPresentationTime, fDurationInMicroseconds) and passes them to the reader's callback; the nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); call, or the direct call in the asynchronous path, is what triggers this hand-off.

One more member worth mentioning is fFrameSize, which holds the number of bytes actually read; this value is always at most fMaxSize.
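
Maintaining that invariant is the job of each doGetNextFrame() implementation. The usual bookkeeping, modeled on the DeviceSource template mentioned earlier, looks roughly like the fragment below; it is assumed to sit inside a FramedSource subclass (so fTo, fMaxSize, fFrameSize and fNumTruncatedBytes are the inherited members), and newFrameSize and newFrameDataStart are placeholders for whatever data the subclass currently holds:

  // Illustrative fragment from inside a subclass's delivery code:
  if (newFrameSize > fMaxSize) {
    fFrameSize = fMaxSize;                        // deliver only what fits into fTo
    fNumTruncatedBytes = newFrameSize - fMaxSize; // record how many bytes were dropped
  } else {
    fFrameSize = newFrameSize;
    fNumTruncatedBytes = 0;
  }
  gettimeofday(&fPresentationTime, NULL);
  memmove(fTo, newFrameDataStart, fFrameSize);    // copy the data into the caller's buffer
  FramedSource::afterGetting(this);               // report completion, as before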





  




