1、包含必需的头文件。
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
2、示例。
int main() {
const char* inputFilePath = "path/to/local/video.mp4"; // 本地视频文件路径
const char* outputUrl = "rtsp://remote-server-ip/stream"; // 远程RTSP服务器地址
AVFormatContext* inputFormatContext = nullptr;
AVOutputFormat* outputFormat = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
// 打开本地视频文件
if (avformat_open_input(&inputFormatContext, inputFilePath, nullptr, nullptr) != 0) {
fprintf(stderr, "Error: Couldn't open input file.\n");
return -1;
}
// 寻找输入流信息
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error: Couldn't find stream information.\n");
return -1;
}
// 创建输出格式上下文
avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl);
if (!outputFormatContext) {
fprintf(stderr, "Error: Couldn't allocate output format context.\n");
return -1;
}
// 设置输出流参数
outputFormat = outputFormatContext->oformat;
for (int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inputStream = inputFormatContext->streams[i];
// 创建输出流
AVStream* outputStream = avformat_new_stream(outputFormatContext, inputStream->codec->codec);
if (!outputStream) {
fprintf(stderr, "Error: Couldn't create output stream.\n");
return -1;
}
// 复制流参数
if (avcodec_copy_context(outputStream->codec, inputStream->codec) < 0) {
fprintf(stderr, "Error: Couldn't copy codec context.\n");
return -1;
}
// 设置编码延迟
outputStream->codec->flags |= AV_CODEC_FLAG_LOW_DELAY;
outputStream->codec->codec_tag = 0;
}
// 打开输出URL
if (!(outputFormat->flags & AVFMT_NOFILE)) {
if (avio_open2(&outputFormatContext->pb, outputUrl, AVIO_FLAG_WRITE, nullptr, nullptr) < 0) {
fprintf(stderr, "Error: Couldn't open output URL.\n");
return -1;
}
}
// 写入头部信息
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error: Couldn't write header.\n");
return -1;
}
// 循环读取视频帧并推送到远程RTSP服务器
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inputStream = inputFormatContext->streams[packet.stream_index];
AVStream* outputStream = outputFormatContext->streams[packet.stream_index];
// 设置时间基和时间戳
packet.pts = av_rescale_q_rnd(packet.pts, inputStream->time_base, outputStream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
packet.dts = av_rescale_q_rnd(packet.dts, inputStream->time_base, outputStream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
packet.duration = av_rescale_q(packet.duration, inputStream->time_base, outputStream->time_base);
packet.pos = -1;
// 写入帧数据
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error: Couldn't write frame.\n");
return -1;
}
av_packet_unref(&packet);
}
// 写入文件尾部信息
av_write_trailer(outputFormatContext);
// 清理资源
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return 0;
}
需要说明的是,上述代码是纯转封装(remux)流程,并不进行重新编码,因此
outputStream->codec->flags |= AV_CODEC_FLAG_LOW_DELAY; 这类编码器标志在此场景下实际不会生效;若要降低推流延迟,应从传输层入手(例如设置 RTSP 的传输参数或减小缓冲)。