简介: RTSP 在体系结构上位于 RTP 和 RTCP 之上, 可使用 TCP 或 UDP 完成数据传输。与 HTTP 相比: HTTP 请求只能由客户机发出、服务器作出响应; 而使用 RTSP 时, 客户机和服务器都可以发出请求, 即 RTSP 可以是双向的。RTSP 是用来控制声音或影像的多媒体串流协议, 并允许同时对多个串流进行控制, 传输时所用的网络通信协定并不在其定义范围内。RTSP 协议默认端口为 554, 默认承载协议为 TCP。
1.使用source insight建立工程 (这里就略过了...)
2.直接上代码:// LIVE555MediaServer.cpp
#include <BasicUsageEnvironment.hh>
#include "DynamicRTSPServer.hh"
#include "version.hh"
// Entry point of the LIVE555 Media Server: sets up the live555 usage
// environment, creates a DynamicRTSPServer (port 554, falling back to 8554),
// prints usage information, optionally enables RTSP-over-HTTP tunneling,
// then runs the event loop forever.
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  // The TaskScheduler drives the single-threaded event loop and all tasks.
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  // The UsageEnvironment is used for console output and error reporting.
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server. Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
  portNumBits rtspServerPortNum = 554;
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    // Binding port 554 typically requires privileges; retry on 8554.
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    // Both attempts failed: report the reason and exit.
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  // Server created successfully: print its name, version, and usage info.
  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t" // how clients connect
       << urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  // FIX: "rtspURLPrefix()" returns a heap-allocated string; free it after use
  // (the original code leaked it).
  delete[] urlPrefix;
  // The supported media file types, inferred from the file name suffix:
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".264\" => a H.264 Video Elementary Stream file\n";
  *env << "\t\".265\" => a H.265 Video Elementary Stream file\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".ac3\" => an AC-3 Audio file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".mkv\" => a Matroska audio+video+(optional)subtitles file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ogg\" or \".ogv\" or \".opus\" => an Ogg audio and/or video file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".vob\" => a VOB (MPEG-2 video with AC-3 audio) file\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "\t\".webm\" => a WebM audio(Vorbis)+video(VP8) file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).
  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
  } else {
    *env << "(RTSP-over-HTTP tunneling is not available.)\n";
  }

  // Enter the event loop; this call never returns.
  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
下面追进 createNew 看其内部实现 (其余调用链后续再补充......):
#include "DynamicRTSPServer.hh"
#include <liveMedia.hh>
#include <string.h>
// Factory method: set up the server's listening socket (the underlying
// implementation also performs bind()/listen()), then construct the server.
// Returns NULL if the socket could not be created.
DynamicRTSPServer*
DynamicRTSPServer::createNew(UsageEnvironment& env, Port ourPort,
UserAuthenticationDatabase* authDatabase,
unsigned reclamationTestSeconds) {
  int serverSocket = setUpOurSocket(env, ourPort);
  if (serverSocket == -1) return NULL; // socket setup failed

  return new DynamicRTSPServer(env, serverSocket, ourPort,
                               authDatabase, reclamationTestSeconds);
}
// Constructor: simply forwards all arguments to the base class,
// "RTSPServerSupportingHTTPStreaming"; no additional state is initialized here.
DynamicRTSPServer::DynamicRTSPServer(UsageEnvironment& env, int ourSocket,
Port ourPort,
UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds)
: RTSPServerSupportingHTTPStreaming(env, ourSocket, ourPort, authDatabase, reclamationTestSeconds) {
}
// Destructor: nothing to do beyond what the base-class destructor handles.
DynamicRTSPServer::~DynamicRTSPServer() {
}
static ServerMediaSession* createNewSMS(UsageEnvironment& env,
char const* fileName, FILE* fid); // forward
// Look up (or create) the "ServerMediaSession" for "streamName", which is
// interpreted as a file in the server's current directory.  Handles the four
// combinations of (file exists?) x (session already exists?):
//  - file gone, session exists  -> remove the stale session, return NULL
//  - file gone, no session      -> return NULL
//  - file exists, session exists and this is the first lookup in the client
//    session -> rebuild the session, in case the file changed underneath us
//  - file exists, no session    -> create and register a new session
ServerMediaSession* DynamicRTSPServer
::lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession) {
  // First, check whether the specified "streamName" exists as a local file:
  FILE* mediaFile = fopen(streamName, "rb");

  // Next, check whether we already have a "ServerMediaSession" for this file:
  ServerMediaSession* session = RTSPServer::lookupServerMediaSession(streamName);

  if (mediaFile == NULL) {
    // The file no longer exists; drop any session that was created for it.
    if (session != NULL) {
      removeServerMediaSession(session);
    }
    return NULL;
  }

  if (session != NULL && isFirstLookupInSession) {
    // Remove the existing "ServerMediaSession" and create a new one, in case
    // the underlying file has changed in some way:
    removeServerMediaSession(session);
    session = NULL;
  }
  if (session == NULL) {
    // Build a fresh session for the file and register it with the server.
    session = createNewSMS(envir(), streamName, mediaFile);
    addServerMediaSession(session);
  }

  fclose(mediaFile); // the FILE* was only needed for the existence check/creation
  return session;
}
//下面是处理 Matroska、Ogg 文件的专用代码; Matroska(.mkv/.webm)与 Ogg(.ogg/.ogv/.opus)都是多媒体容器格式, 可在一个文件中封装音频、视频及字幕等多路流
// Special code for handling Matroska files:
// Shared state between the caller of "MatroskaFileServerDemux::createNew()"
// and its completion callback ("onMatroskaDemuxCreation").
struct MatroskaDemuxCreationState {
MatroskaFileServerDemux* demux; // filled in by the callback on completion
char watchVariable; // set non-zero by the callback to end the waiting event loop
};
// Completion callback for "MatroskaFileServerDemux::createNew()": records the
// newly created demux and signals the waiting event loop to stop.
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* clientData) {
  MatroskaDemuxCreationState* state = (MatroskaDemuxCreationState*)clientData;
  state->demux = newDemux;
  state->watchVariable = 1; // wake up the event loop that is waiting on this
}
// END Special code for handling Matroska files:
// Special code for handling Ogg files:
// Shared state between the caller of "OggFileServerDemux::createNew()"
// and its completion callback ("onOggDemuxCreation").
struct OggDemuxCreationState {
OggFileServerDemux* demux; // filled in by the callback on completion
char watchVariable; // set non-zero by the callback to end the waiting event loop
};
// Completion callback for "OggFileServerDemux::createNew()": records the
// newly created demux and signals the waiting event loop to stop.
static void onOggDemuxCreation(OggFileServerDemux* newDemux, void* clientData) {
  OggDemuxCreationState* state = (OggDemuxCreationState*)clientData;
  state->demux = newDemux;
  state->watchVariable = 1; // wake up the event loop that is waiting on this
}
// END Special code for handling Ogg files:
// Helper macro: create a new "ServerMediaSession" for "fileName", with a
// description string built from "description".  Assigns the local "sms" in
// the invoking scope; the do { } while(0) wrapper makes the expansion behave
// as a single statement.
#define NEW_SMS(description) do {\
char const* descStr = description\
", streamed by the LIVE555 Media Server";\
sms = ServerMediaSession::createNew(env, fileName, fileName, descStr);\
} while(0)
// Create a "ServerMediaSession" for the named file, choosing the subsession
// type(s) from the file name's extension.  Returns NULL if the file has no
// extension; returns a session with no subsessions for unknown extensions.
// (NOTE: the original blog text had collapsed these lines so that the "//"
// comments swallowed the code that followed them; the formatting is restored
// here so the block is valid C++.)
static ServerMediaSession* createNewSMS(UsageEnvironment& env,
char const* fileName, FILE* /*fid*/) {
  // Use the file name extension to determine the type of "ServerMediaSession":
  char const* extension = strrchr(fileName, '.');
  if (extension == NULL) return NULL;

  ServerMediaSession* sms = NULL;
  Boolean const reuseSource = False;
  // Dispatch on the suffix; each branch creates the session via NEW_SMS and
  // adds the matching file-server subsession(s):
  if (strcmp(extension, ".aac") == 0) {
    // Assumed to be an AAC Audio (ADTS format) file:
    NEW_SMS("AAC Audio");
    sms->addSubsession(ADTSAudioFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".amr") == 0) {
    // Assumed to be an AMR Audio file:
    NEW_SMS("AMR Audio");
    sms->addSubsession(AMRAudioFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".ac3") == 0) {
    // Assumed to be an AC-3 Audio file:
    NEW_SMS("AC-3 Audio");
    sms->addSubsession(AC3AudioFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".m4e") == 0) {
    // Assumed to be a MPEG-4 Video Elementary Stream file:
    NEW_SMS("MPEG-4 Video");
    sms->addSubsession(MPEG4VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".264") == 0) {
    // Assumed to be a H.264 Video Elementary Stream file:
    NEW_SMS("H.264 Video");
    OutPacketBuffer::maxSize = 100000; // allow for some possibly large H.264 frames
    sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".265") == 0) {
    // Assumed to be a H.265 Video Elementary Stream file:
    NEW_SMS("H.265 Video");
    OutPacketBuffer::maxSize = 100000; // allow for some possibly large H.265 frames
    sms->addSubsession(H265VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".mp3") == 0) {
    // Assumed to be a MPEG-1 or 2 Audio file:
    NEW_SMS("MPEG-1 or 2 Audio");
    // To stream using 'ADUs' rather than raw MP3 frames, uncomment the following:
//#define STREAM_USING_ADUS 1
    // To also reorder ADUs before streaming, uncomment the following:
//#define INTERLEAVE_ADUS 1
    // (For more information about ADUs and interleaving,
    //  see <http://www.live555.com/rtp-mp3/>)
    Boolean useADUs = False;
    Interleaving* interleaving = NULL;
#ifdef STREAM_USING_ADUS
    useADUs = True;
#ifdef INTERLEAVE_ADUS
    unsigned char interleaveCycle[] = {0,2,1,3}; // or choose your own...
    unsigned const interleaveCycleSize
      = (sizeof interleaveCycle)/(sizeof (unsigned char));
    interleaving = new Interleaving(interleaveCycleSize, interleaveCycle);
#endif
#endif
    sms->addSubsession(MP3AudioFileServerMediaSubsession::createNew(env, fileName, reuseSource, useADUs, interleaving));
  } else if (strcmp(extension, ".mpg") == 0) {
    // Assumed to be a MPEG-1 or 2 Program Stream (audio+video) file:
    NEW_SMS("MPEG-1 or 2 Program Stream");
    MPEG1or2FileServerDemux* demux
      = MPEG1or2FileServerDemux::createNew(env, fileName, reuseSource);
    sms->addSubsession(demux->newVideoServerMediaSubsession());
    sms->addSubsession(demux->newAudioServerMediaSubsession());
  } else if (strcmp(extension, ".vob") == 0) {
    // Assumed to be a VOB (MPEG-2 Program Stream, with AC-3 audio) file:
    NEW_SMS("VOB (MPEG-2 video with AC-3 audio)");
    MPEG1or2FileServerDemux* demux
      = MPEG1or2FileServerDemux::createNew(env, fileName, reuseSource);
    sms->addSubsession(demux->newVideoServerMediaSubsession());
    sms->addSubsession(demux->newAC3AudioServerMediaSubsession());
  } else if (strcmp(extension, ".ts") == 0) {
    // Assumed to be a MPEG Transport Stream file:
    // Use an index file name that's the same as the TS file name, except with ".tsx":
    unsigned indexFileNameLen = strlen(fileName) + 2; // allow for trailing "x\0"
    char* indexFileName = new char[indexFileNameLen];
    sprintf(indexFileName, "%sx", fileName);
    NEW_SMS("MPEG Transport Stream");
    sms->addSubsession(MPEG2TransportFileServerMediaSubsession::createNew(env, fileName, indexFileName, reuseSource));
    delete[] indexFileName;
  } else if (strcmp(extension, ".wav") == 0) {
    // Assumed to be a WAV Audio file:
    NEW_SMS("WAV Audio Stream");
    // To convert 16-bit PCM data to 8-bit u-law, prior to streaming,
    // change the following to True:
    Boolean convertToULaw = False;
    sms->addSubsession(WAVAudioFileServerMediaSubsession::createNew(env, fileName, reuseSource, convertToULaw));
  } else if (strcmp(extension, ".dv") == 0) {
    // Assumed to be a DV Video file
    // First, make sure that the RTPSinks' buffers will be large enough to handle
    // the huge size of DV frames (as big as 288000).
    OutPacketBuffer::maxSize = 300000;
    NEW_SMS("DV Video");
    sms->addSubsession(DVVideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  } else if (strcmp(extension, ".mkv") == 0 || strcmp(extension, ".webm") == 0) {
    // Assumed to be a Matroska file (note that WebM ('.webm') files are also Matroska files)
    OutPacketBuffer::maxSize = 100000; // allow for some possibly large VP8 or VP9 frames
    NEW_SMS("Matroska video+audio+(optional)subtitles");

    // Create a Matroska file server demultiplexor for the specified file.
    // (We enter the event loop to wait for this to complete.)
    MatroskaDemuxCreationState creationState;
    creationState.watchVariable = 0;
    MatroskaFileServerDemux::createNew(env, fileName, onMatroskaDemuxCreation, &creationState);
    env.taskScheduler().doEventLoop(&creationState.watchVariable);

    ServerMediaSubsession* smss;
    while ((smss = creationState.demux->newServerMediaSubsession()) != NULL) {
      sms->addSubsession(smss);
    }
  } else if (strcmp(extension, ".ogg") == 0 || strcmp(extension, ".ogv") == 0 || strcmp(extension, ".opus") == 0) {
    // Assumed to be an Ogg file
    NEW_SMS("Ogg video and/or audio");

    // Create a Ogg file server demultiplexor for the specified file.
    // (We enter the event loop to wait for this to complete.)
    OggDemuxCreationState creationState;
    creationState.watchVariable = 0;
    OggFileServerDemux::createNew(env, fileName, onOggDemuxCreation, &creationState);
    env.taskScheduler().doEventLoop(&creationState.watchVariable);

    ServerMediaSubsession* smss;
    while ((smss = creationState.demux->newServerMediaSubsession()) != NULL) {
      sms->addSubsession(smss);
    }
  }

  return sms;
}
// Summary (author's note, translated): the analysis was aided by several blog
// posts; because of version differences the code has been through many
// iterations and differs in places from older write-ups.  Not hard to follow
// overall, but there are many small details.