Windows 和 Ubuntu 系统下分别搭建 RTSP 和 RTMP 推拉流环境
/ 学习记录,有错误之处欢迎指出 /
一、windows下搭建rtmp(或hls或flv)环境:obs/ffmpeg推流 + nginx
1.0 使用obs推流
obs软件使用可参考链接obs下载。
obs打开后,新建场景和视频捕获设备,会自动识别到usb摄像头或者是前置摄像头。
打开设置中的直播选项,选择自定义模式,在URL中填入想要推送的nginx服务器地址。
本机可写为:
rtmp://127.0.0.1:1935/live 或rtmp://localhost:1935/live
推流码可以不写(若自定义了直播码,在拉流时需在URL后加上该码)
点击开始直播,即可推流。
1.1下载 ffmpeg 并推流
安装ffmpeg可参考链接 ffmpeg安装。
1)安装好后命令行进入bin所在目录(并将该目录加入环境变量的用户变量中)
2)使用如下命令查看视频采集设备代码
ffmpeg -list_devices true -f dshow -i dummy
3)
推流程序示例:
ffmpeg -f dshow -i video="Logi C270 HD WebCam" -s 960x540 -r 150 -c:v h264 -preset ultrafast -tune zerolatency -an -f flv rtmp://192.168.180.1:1935/live/home
"Logi C270 HD WebCam"//视频采集设备代码,可换成其他的
-s 960x540 // 分辨率
-r 150 //帧率
-c:v h264 //编码协议
!!!!!!若换成硬编码:
-c:v h264_amf //本机的 GPU 为 AMD;若想换成 H.265 编码,可改用 -c:v hevc_amf
-preset ultrafast -tune zerolatency // 速度控制 和延时控制
-an //只推送视频流
1.2 安装 nginx服务器
1)下载链接nginx下载
选择 版本 nginx-1.7.11.3-Gryphon
!!!若用该版本解压后要将文件名中的空格换掉
2) 下载 nginx-rtmp扩展模块nginx-rtmp-moudle下载
解压到nginx-1.7.11.3-Gryphon文件夹下
3)更改配置文件 /conf/nginx.conf
给http配置外添加rtmp的直播端口
#user nobody;
# multiple workers works !
worker_processes 2;
#error_log logs/error.log;
#error_log logs/error.log notice;
#error_log logs/error.log info;
#pid logs/nginx.pid;
events {
worker_connections 8192;
# max value 32768, nginx recycling connections+registry optimization =
# this.value * 20 = max concurrent connections currently tested with one worker
# C1000K should be possible depending there is enough ram/cpu power
# multi_accept on;
}
# RTMP block added for the nginx-rtmp module; the http block below is stock config.
rtmp {
server {
listen 1935;#listening port; change it if 1935 is already in use
chunk_size 4000;#size of the FLV chunks sent by publishers
application live { #define an application named "live" (appears in the push/pull URL)
live on;#enable live (one-to-many) broadcasting for this application
allow publish 127.0.0.1;#only localhost may publish a stream
allow play all;
}
}
}
http {
#include /nginx/conf/naxsi_core.rules;
include mime.types;
default_type application/octet-stream;
#log_format main '$remote_addr:$remote_port - $remote_user [$time_local] "$request" '
# '$status $body_bytes_sent "$http_referer" '
# '"$http_user_agent" "$http_x_forwarded_for"';
#access_log logs/access.log main;
# # loadbalancing PHP
# upstream myLoadBalancer {
# server 127.0.0.1:9001 weight=1 fail_timeout=5;
# server 127.0.0.1:9002 weight=1 fail_timeout=5;
# server 127.0.0.1:9003 weight=1 fail_timeout=5;
# server 127.0.0.1:9004 weight=1 fail_timeout=5;
# server 127.0.0.1:9005 weight=1 fail_timeout=5;
# server 127.0.0.1:9006 weight=1 fail_timeout=5;
# server 127.0.0.1:9007 weight=1 fail_timeout=5;
# server 127.0.0.1:9008 weight=1 fail_timeout=5;
# server 127.0.0.1:9009 weight=1 fail_timeout=5;
# server 127.0.0.1:9010 weight=1 fail_timeout=5;
# least_conn;
# }
sendfile off;
#tcp_nopush on;
server_names_hash_bucket_size 128;
## Start: Timeouts ##
client_body_timeout 10;
client_header_timeout 10;
keepalive_timeout 30;
send_timeout 10;
keepalive_requests 10;
## End: Timeouts ##
#gzip on;
server {
listen 80;
server_name localhost;
#charset koi8-r;
#access_log logs/host.access.log main;
## Caching Static Files, put before first location
#location ~* \.(jpg|jpeg|png|gif|ico|css|js)$ {
# expires 14d;
# add_header Vary Accept-Encoding;
#}
# For Naxsi remove the single # line for learn mode, or the ## lines for full WAF mode
location / {
#include /nginx/conf/mysite.rules; # see also http block naxsi include line
##SecRulesEnabled;
##DeniedUrl "/RequestDenied";
##CheckRule "$SQL >= 8" BLOCK;
##CheckRule "$RFI >= 8" BLOCK;
##CheckRule "$TRAVERSAL >= 4" BLOCK;
##CheckRule "$XSS >= 8" BLOCK;
root html;
index index.html index.htm;
}
# For Naxsi remove the ## lines for full WAF mode, redirect location block used by naxsi
##location /RequestDenied {
## return 412;
##}
## Lua examples !
# location /robots.txt {
# rewrite_by_lua '
# if ngx.var.http_host ~= "localhost" then
# return ngx.exec("/robots_disallow.txt");
# end
# ';
# }
#error_page 404 /404.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root html;
}
# proxy the PHP scripts to Apache listening on 127.0.0.1:80
#
#location ~ \.php$ {
# proxy_pass http://127.0.0.1;
#}
# pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
#
#location ~ \.php$ {
# root html;
# fastcgi_pass 127.0.0.1:9000; # single backend process
# fastcgi_pass myLoadBalancer; # or multiple, see example above
# fastcgi_index index.php;
# fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
# include fastcgi_params;
#}
# deny access to .htaccess files, if Apache's document root
# concurs with nginx's one
#
#location ~ /\.ht {
# deny all;
#}
}
# another virtual host using mix of IP-, name-, and port-based configuration
#
#server {
# listen 8000;
# listen somename:8080;
# server_name somename alias another.alias;
# location / {
# root html;
# index index.html index.htm;
# }
#}
# HTTPS server
#
#server {
# listen 443 ssl spdy;
# server_name localhost;
# ssl on;
# ssl_certificate cert.pem;
# ssl_certificate_key cert.key;
# ssl_session_timeout 5m;
# ssl_prefer_server_ciphers On;
# ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
# ssl_ciphers ECDH+AESGCM:ECDH+AES256:ECDH+AES128:ECDH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!eNULL:!MD5:!DSS:!EXP:!ADH:!LOW:!MEDIUM;
# location / {
# root html;
# index index.html index.htm;
# }
#}
}
!!!!!!!!!!!!!!!!!!!!!!该文件夹的路径不能有中文
4)双击 nginx.exe即可启动
登录localhost
或 127.0.0.1
出现nginx界面即可
5)拉流测试
可使用VLC的打开网络串流进行捕获或者用程序拉流。
???不知为何,用vlc和ffplay 拉流会比用c++ opencv 拉流程序延时大不少,不知是否是解码还是渲染的问题,有知道的朋友不吝赐教???
二、windows下搭建rtsp 环境: ffmpeg推流 + rtsp-simple-server
1)ffmpeg 安装方法与上面一样,在进行 rtsp 推流时,需要更改推流命令
2)下载 rtsp-simple-server 链接
3)解压后的 .exe 直接执行
三、Ubuntu下安装 rtmp环境:ffmpeg + nginx
1)ubuntu下安装ffmpeg
sudo apt update
sudo apt install ffmpeg
ffmpeg -version
或者安装特定版本的ffmpeg,因为在有的时候opencv版本需要的ffmpeg版本不一样。
sudo add-apt-repository ppa:savoury1/ffmpeg4
sudo apt update
sudo apt install ffmpeg libavcodec58 libavformat58 libavutil56 libswscale5
举例上面安装了一个ffmpeg4.X 会自动更新其他版本的ffmpeg,无需手动添加路径
2)ubuntu下安装nginx
sudo apt update
sudo apt install nginx
安装 rtmp扩展
sudo apt-get install libnginx-mod-rtmp
nginx的配置文默认在
/etc/nginx/
编辑 nginx.conf 添加
# RTMP block appended to /etc/nginx/nginx.conf for the libnginx-mod-rtmp module.
rtmp {
server {
listen 1935;# RTMP listening port
chunk_size 4096;# chunk size used by the RTMP protocol
application live {# application name used in the push/pull URL (…/live/<key>)
live on;# enable live (one-to-many) broadcasting
record off;# do not record incoming streams to disk
}
}
}
打开nginx服务:sudo systemctl start nginx
(nginx在linux中应该是开机自启动的,可以通过 ps -ef | grep nginx
查看nginx进程情况 ,)
关闭nginx进程:
sudo systemctl stop nginx
关闭开机自启动
sudo systemctl disable nginx
3)查看usb摄像头的端口号
v4l2-ctl --list-devices
//查看摄像头的输出格式
v4l2-ctl -d /dev/video0 --list-formats-ext --all
4)ffmpeg推流命令举例
ffmpeg -f v4l2 -input_format mjpeg -video_size 640x480 -i /dev/video4 -vcodec libx264 -r 30 -preset ultrafast -tune zerolatency -b:v 900k -f flv rtmp://192.168.180.1:1935/live/home
-f v4l2 //video4linux2视频输入格式 ,通常用于从摄像头捕获视频流
-input_format mjpeg //摄像头视频流的帧内编码格式,无需解码直接进行264编码。
// 可以不显式指定该参数
/dev/video4 //是设备号
-b:v 900k //是码率
5)拉流测试同 1.2 节的 5)
四 ubuntu下使用ffmpeg api c++开发
由于在上面已经用 apt 安装了 ffmpeg,为了防止与源码编译的版本冲突,先使用 apt purge
将其卸载了
extern "C"
{
#include "build/include/libavformat/avformat.h"
#include "build/include/libavcodec/avcodec.h"
#include "build/include/libavutil/time.h"
}
#include <iostream>
void printErr(int errNum);
// Convert an AVRational to a double.
// Returns 0.0 for degenerate ratios (zero numerator or zero denominator)
// instead of dividing by zero.
static double r2d(AVRational r)
{
    if (r.num == 0 || r.den == 0)
    {
        return 0.0;
    }
    return static_cast<double>(r.num) / static_cast<double>(r.den);
}
int main()
{
    // Initialize the network stack (required before opening RTMP URLs).
    avformat_network_init();

    // Open the local input file and parse its container header.
    AVFormatContext *inContext = NULL;
    const char *inFile = "001.mp4"; // string literals must bind to const char* in C++
    int ret = avformat_open_input(&inContext, inFile, 0, 0);
    if (ret != 0)
    {
        printErr(ret);
        return -1;
    }
    std::cout << "avformat_open_input success" << std::endl;

    // Probe the streams so codec parameters get filled in.
    ret = avformat_find_stream_info(inContext, 0);
    if (ret != 0)
    {
        printErr(ret);
        return -1;
    }
    std::cout << "avformat_find_stream_info success" << std::endl;

    // Dump input information (last argument 0 = input context).
    av_dump_format(inContext, 0, inFile, 0);

    // Allocate an FLV output context targeting the RTMP server.
    AVFormatContext *outContext = NULL;
    const char *outFile = "rtmp://192.168.206.131:10088/live";
    ret = avformat_alloc_output_context2(&outContext, 0, "flv", outFile);
    if (outContext == 0)
    {
        printErr(ret);
        return -1;
    }
    std::cout << "avformat_alloc_output_context2 success" << std::endl;

    // Mirror every input stream onto the output (remux only, no re-encode).
    for (int i = 0; i < (int)inContext->nb_streams; i++)
    {
        AVStream *outStream = avformat_new_stream(outContext, NULL);
        if (outStream == NULL)
        {
            printErr(-1);
            return -1;
        }
        // Copy codec parameters from the corresponding input stream.
        ret = avcodec_parameters_copy(outStream->codecpar, inContext->streams[i]->codecpar);
        if (ret < 0)
        {
            printErr(ret);
            return -1;
        }
        // Clear the tag so the FLV muxer picks one it supports.
        outStream->codecpar->codec_tag = 0;
    }
    // Dump output information (last argument 1 = output context).
    av_dump_format(outContext, 0, outFile, 1);

    // Open the network I/O for writing, then emit the FLV header.
    ret = avio_open(&outContext->pb, outFile, AVIO_FLAG_WRITE);
    if (ret != 0)
    {
        printErr(ret);
        return -1;
    }
    std::cout << "avio_open success" << std::endl;
    ret = avformat_write_header(outContext, NULL);
    if (ret < 0)
    {
        printErr(ret);
        return -1;
    }
    std::cout << "avformat_write_header success" << std::endl;

    // Push packets, pacing video against the wall clock.
    AVPacket avpkt;
    long long startTime = av_gettime(); // microseconds
    while (1)
    {
        ret = av_read_frame(inContext, &avpkt);
        if (ret != 0)
        {
            break; // EOF or read error: stop pushing
        }
        std::cout << avpkt.pts << std::endl;

        // Rescale pts/dts/duration from the input stream's time base to the output's.
        AVRational inTime = inContext->streams[avpkt.stream_index]->time_base;
        AVRational outTime = outContext->streams[avpkt.stream_index]->time_base;
        avpkt.pts = av_rescale_q_rnd(avpkt.pts, inTime, outTime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        avpkt.dts = av_rescale_q_rnd(avpkt.dts, inTime, outTime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        avpkt.duration = av_rescale_q_rnd(avpkt.duration, inTime, outTime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        avpkt.pos = -1;

        // Throttle video so we push in real time rather than as fast as possible.
        // FIX: the original used '=' (assignment) instead of '==', making the
        // condition always true, and converted dts with the INPUT time base even
        // though dts was already rescaled to the OUTPUT time base above.
        if (inContext->streams[avpkt.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            long long nowTime = av_gettime() - startTime;                       // elapsed us
            long long dtsTime = (long long)(avpkt.dts * (1000 * 1000 * r2d(outTime))); // dts in us
            if (dtsTime > nowTime)
            {
                av_usleep(dtsTime - nowTime);
            }
        }

        // Interleave and send the packet to the RTMP server.
        ret = av_interleaved_write_frame(outContext, &avpkt);
        if (ret < 0)
        {
            printErr(ret);
            return -1;
        }
        // Release the packet's buffer before reading the next frame.
        av_packet_unref(&avpkt);
    }

    // FIX: finalize the stream and release resources (missing in the original,
    // which leaked both contexts and never wrote the trailer). The Windows-only
    // system("pause") was removed - it fails on Ubuntu.
    av_write_trailer(outContext);
    avio_closep(&outContext->pb);
    avformat_free_context(outContext);
    avformat_close_input(&inContext);
    avformat_network_deinit();
    return 0;
}
// Translate an FFmpeg error code into a readable message and print it to stderr.
// FIX: the original filled errBuf but never printed it, and then executed the
// Windows-only "pause" shell command, which fails on Ubuntu.
void printErr(int errNum)
{
    char errBuf[1024] = {0};
    av_strerror(errNum, errBuf, sizeof(errBuf));
    std::cerr << "ffmpeg error " << errNum << ": " << errBuf << std::endl;
}
问题1:发现引入的头文件是以前安装opencv的ffmpeg头文件,位于/usr/include/目录下,遂将头文件改成绝对路径格式,以区分
问题2:make编译的时候会报如下错误:
undefined reference to :************
需要在头文件上加入 extern "C"声明
extern "C"
{
#include "build/include/libavformat/avformat.h"
#include "build/include/libavcodec/avcodec.h"
#include "build/include/libavutil/time.h"
}
3)添加CMakeLists.txt
# Build configuration for the FFmpeg RTMP push example.
cmake_minimum_required(VERSION 3.1)
project(ImageShow CXX)

# Require C++11 without compiler extensions; this replaces the duplicated
# -std=c++11 flag the original also passed via add_compile_options.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
# set(CMAKE_BUILD_TYPE Debug)

# Debug symbols for the example build.
add_compile_options(-g)

# NOTE(review): an explicit source list is preferred over file(GLOB) - globs
# miss newly added files until re-configure - but the source file names are
# not known here, so the glob is kept.
file(GLOB LIBPATH2 "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp")

# Target name kept as "main.o" so existing run instructions still work.
add_executable(main.o ${LIBPATH2})

# FFmpeg headers, scoped to this target instead of the whole directory
# (replaces the directory-wide include_directories call).
target_include_directories(main.o PRIVATE /ffmpeg-dev/)

# Link the self-built FFmpeg shared libraries by absolute path;
# link_directories is unnecessary when full library paths are used.
file(GLOB LIBPATH3 "/ffmpeg-dev/build/lib/lib*.so")
# file(GLOB LIBPATH4 "/ffmpeg-dev/build/lib/lib*.a")
target_link_libraries(main.o PRIVATE ${LIBPATH3})
# target_link_libraries(main.o PRIVATE avformat avcodec avutil)