当RTSPServer收到对某个媒体的DESCRIBE请求时,它会找到对应的ServerMediaSession,调用ServerMediaSession::generateSDPDescription()。generateSDPDescription()中会遍历ServerMediaSession中所有的ServerMediaSubsession,通过subsession->sdpLines()取得每个Subsession的sdp,合并成一个完整的SDP返回之。
我们几乎可以断定,文件的打开和分析应该是在每个Subsession的sdpLines()函数中完成的,看看这个函数:
- // Returns the SDP media description for this subsession. The result is
- // computed once and cached in fSDPLines, so the media file is opened and
- // analyzed only on the first call; later calls return the cached string.
- char const* OnDemandServerMediaSubsession::sdpLines()
- {
- if (fSDPLines == NULL) {
- // We need to construct a set of SDP lines that describe this
- // subsession (as a unicast stream). To do so, we first create
- // dummy (unused) source and "RTPSink" objects,
- // whose parameters we use for the SDP lines:
- unsigned estBitrate;
- FramedSource* inputSource = createNewStreamSource(0, estBitrate);
- if (inputSource == NULL)
- return NULL; // file not found
- struct in_addr dummyAddr;
- dummyAddr.s_addr = 0;
- Groupsock dummyGroupsock(envir(), dummyAddr, 0, 0);
- // Dynamic RTP payload types start at 96; each track gets its own number.
- unsigned char rtpPayloadType = 96 + trackNumber() - 1; // if dynamic
- RTPSink* dummyRTPSink = createNewRTPSink(&dummyGroupsock,
- rtpPayloadType, inputSource);
- // setSDPLinesFromRTPSink() queries the dummy sink/source and stores the
- // assembled description in fSDPLines.
- setSDPLinesFromRTPSink(dummyRTPSink, inputSource, estBitrate);
- // The dummy objects were needed only for their parameters; close them now.
- Medium::close(dummyRTPSink);
- closeStreamSource(inputSource);
- }
- return fSDPLines;
- }
- // Creates the media-source chain for a client session: a ByteStreamFileSource
- // reading the raw file, wrapped in an H264VideoStreamFramer (the framer for
- // the video elementary stream). Returns NULL if the file cannot be opened.
- // estBitrate is an output parameter — a fixed 500 kbps estimate here.
- FramedSource* H264VideoFileServerMediaSubsession::createNewStreamSource(
- unsigned /*clientSessionId*/,
- unsigned& estBitrate)
- {
- estBitrate = 500; // kbps, estimate
- // Create the video source:
- ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(),
- fFileName);
- if (fileSource == NULL)
- return NULL;
- // Record the file size as a side effect of opening the file.
- fFileSize = fileSource->fileSize();
- // Create a framer for the Video Elementary Stream:
- return H264VideoStreamFramer::createNew(envir(), fileSource);
- }
- // Creates the RTP sink for this subsession. Only the groupsock and the
- // (dynamic) payload type are passed on to H264VideoRTPSink; the input
- // source parameter is deliberately ignored.
- RTPSink* H264VideoFileServerMediaSubsession::createNewRTPSink(
- Groupsock* rtpGroupsock,
- unsigned char rtpPayloadTypeIfDynamic,
- FramedSource* /*inputSource*/)
- {
- return H264VideoRTPSink::createNew(envir(), rtpGroupsock,
- rtpPayloadTypeIfDynamic);
- }
- // Assembles the subsession's SDP block ("m=", "c=", "b=", optional
- // "a=rtpmap"/"a=range"/aux lines, and "a=control") by querying the given
- // RTP sink, then stores a duplicate of the result in fSDPLines.
- // No-op if rtpSink is NULL.
- void OnDemandServerMediaSubsession::setSDPLinesFromRTPSink(
- RTPSink* rtpSink,
- FramedSource* inputSource,
- unsigned estBitrate)
- {
- if (rtpSink == NULL)
- return;
- // Query the sink for the media type and payload type to put in the "m=" line.
- char const* mediaType = rtpSink->sdpMediaType();
- unsigned char rtpPayloadType = rtpSink->rtpPayloadType();
- struct in_addr serverAddrForSDP;
- serverAddrForSDP.s_addr = fServerAddressForSDP;
- char* const ipAddressStr = strDup(our_inet_ntoa(serverAddrForSDP));
- char* rtpmapLine = rtpSink->rtpmapLine();
- char const* rangeLine = rangeSDPLine();
- // getAuxSDPLine() may block until codec config (e.g. H.264 parameter sets)
- // is available; substitute "" if there is no aux line.
- char const* auxSDPLine = getAuxSDPLine(rtpSink, inputSource);
- if (auxSDPLine == NULL)
- auxSDPLine = "";
- char const* const sdpFmt = "m=%s %u RTP/AVP %d\r\n"
- "c=IN IP4 %s\r\n"
- "b=AS:%u\r\n"
- "%s"
- "%s"
- "%s"
- "a=control:%s\r\n";
- // Size the buffer as format length plus worst-case widths of each field.
- unsigned sdpFmtSize = strlen(sdpFmt) + strlen(mediaType) + 5 /* max short len */
- + 3 /* max char len */
- + strlen(ipAddressStr) + 20 /* max int len */
- + strlen(rtpmapLine) + strlen(rangeLine) + strlen(auxSDPLine)
- + strlen(trackId());
- char* sdpLines = new char[sdpFmtSize];
- sprintf(sdpLines, sdpFmt, mediaType, // m= <media>
- fPortNumForSDP, // m= <port>
- rtpPayloadType, // m= <fmt list>
- ipAddressStr, // c= address
- estBitrate, // b=AS:<bandwidth>
- rtpmapLine, // a=rtpmap:... (if present)
- rangeLine, // a=range:... (if present)
- auxSDPLine, // optional extra SDP line
- trackId()); // a=control:<track-id>
- // Free the strings this function took ownership of.
- // NOTE(review): auxSDPLine is not freed here — presumably it is owned by
- // the rtpSink (or is the "" literal); verify against the sink's contract.
- delete[] (char*) rangeLine;
- delete[] rtpmapLine;
- delete[] ipAddressStr;
- // Cache a duplicate; the temporary assembly buffer is released.
- fSDPLines = strDup(sdpLines);
- delete[] sdpLines;
- }
- // Default implementation of the aux-SDP-line hook: simply return whatever
- // auxiliary SDP line the RTP sink already has (NULL if there is no sink).
- // Subclasses override this when the aux line is not immediately available.
- char const* OnDemandServerMediaSubsession::getAuxSDPLine(
- RTPSink* rtpSink,
- FramedSource* /*inputSource*/)
- {
- // Default implementation:
- return rtpSink == NULL ? NULL : rtpSink->auxSDPLine();
- }
- // H.264 override of the aux-SDP-line hook. The "profile-level-id" and
- // "sprop-parameter-sets" values only become known once data has been read
- // from the file, so this starts a dummy playback into the sink and runs the
- // event loop until the aux line appears. The result is cached in fAuxSDPLine.
- char const* H264VideoFileServerMediaSubsession::getAuxSDPLine(
- RTPSink* rtpSink,
- FramedSource* inputSource)
- {
- if (fAuxSDPLine != NULL)
- return fAuxSDPLine; // it's already been set up (for a previous client)
- if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
- // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
- // until we start reading the file. This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
- // and we need to start reading data from our file until this changes.
- fDummyRTPSink = rtpSink;
- // Start reading the file:
- fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
- // Check whether the sink's 'auxSDPLine()' is ready:
- checkForAuxSDPLine(this);
- }
- // Block here, running the event loop, until setDoneFlag() is called
- // (by checkForAuxSDPLine1() once fAuxSDPLine has been obtained).
- envir().taskScheduler().doEventLoop(&fDoneFlag);
- return fAuxSDPLine;
- }
- // Polls for the auxiliary SDP line. Three cases:
- // 1. fAuxSDPLine already set -> signal the blocked event loop and stop;
- // 2. the dummy sink now has an aux line -> copy it, drop the sink, signal;
- // 3. not ready yet -> reschedule this check after a 100 ms delay.
- void H264VideoFileServerMediaSubsession::checkForAuxSDPLine1()
- {
- char const* dasl;
- if (fAuxSDPLine != NULL) {
- // Signal the event loop that we're done:
- setDoneFlag();
- } else if (fDummyRTPSink != NULL
- && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
- // Keep our own copy of the line; the sink reference is no longer needed.
- fAuxSDPLine = strDup(dasl);
- fDummyRTPSink = NULL;
- // Signal the event loop that we're done:
- setDoneFlag();
- } else {
- // try again after a brief delay:
- int uSecsToDelay = 100000; // 100 ms
- // checkForAuxSDPLine (static) presumably forwards to this method --
- // its definition is outside this excerpt.
- nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
- (TaskFunc*) checkForAuxSDPLine, this);
- }
- }