General workflow
A Java (JavaCV) program pulls the source stream (RTSP/RTMP) and pushes it to nginx-rtmp; FFmpeg, invoked by nginx-rtmp, transcodes the stream and slices it into HLS segments, which the frontend plays with hls.js.
Environment:

| Name | Version |
| --- | --- |
| JDK | 1.8 |
| FFmpeg | 4.1 |
| JavaCV (jar used to pull and push the video stream) | org.bytedeco:javacv-platform 1.5.1 (see the pom dependency below) |
| alfg/nginx-rtmp (installed via Docker) | latest |
Installing FFmpeg on CentOS 7
FFmpeg is an open-source program suite for recording, converting, and streaming digital audio and video. Here we use it to pull the RTMP stream, transcode it into playable HLS video, and hand the result to nginx.
# 1. Download
wget https://johnvansickle.com/ffmpeg/release-source/ffmpeg-4.1.tar.xz
# 2. Extract
tar -xvJf ffmpeg-4.1.tar.xz
# 3. Configure (run inside the extracted directory, where the configure script lives)
./configure --prefix=/usr/local/ffmpeg
# If the command in step 3 fails and prints the following message:
/* If you think configure made a mistake, make sure you are using the latest
version from Git. If the latest version fails, report the problem to the
ffmpeg-user@ffmpeg.org mailing list or IRC #ffmpeg on irc.freenode.net.
Include the log file "config.log" produced by configure as this will help
solve the problem.*/
-------------------------- If step 3 printed the message above, you need to install yasm --------------------------
# 4.1 Download yasm
wget http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz
# 4.2 Extract yasm into /usr/local/software/yasm (create the directory first)
mkdir -p /usr/local/software/yasm
tar zxvf yasm-1.3.0.tar.gz -C /usr/local/software/yasm
# 4.3 Run the following inside the extracted yasm source directory (where its configure script lives)
./configure
# 4.4 Build and install yasm
make
make install
# 4.5 Edit the loader configuration
vim /etc/ld.so.conf
# make sure the file contains the following lines
include ld.so.conf.d/*.conf
/usr/local/ffmpeg/lib/
# then refresh the shared-library cache
ldconfig
------------------------------------------- end -----------------------------------
# 5. Build and install FFmpeg (back inside the ffmpeg source directory; if configure failed earlier, re-run ./configure --prefix=/usr/local/ffmpeg after installing yasm)
make
make install
# Check that the installation succeeded (with the prefix above, the binary is installed under /usr/local/ffmpeg/bin, so add that directory to PATH or call it by its full path)
ffmpeg -version
Installing nginx-rtmp with Docker
Note: we use the alfg/nginx-rtmp image rather than plain nginx, because stock nginx cannot handle the RTMP protocol; this image ships nginx with the nginx-rtmp module built in.
The nginx-rtmp configuration file explained
daemon off;

error_log /dev/stdout info;

events {
    worker_connections 1024;
}

rtmp {
    server {
        listen 1935;
        chunk_size 4000; # default RTMP chunk size

        # The backend pushes the RTMP stream to this application.
        # Example push address: rtmp://localhost:1935/stream/test
        application stream {
            live on;

            # ffmpeg uses the command below to transcode the pushed stream and slice it.
            # Slicing cuts the video into multiple .ts segment files, which are indexed by an xxx.m3u8 playlist;
            # the frontend only needs to request that xxx.m3u8 file to play the .ts segments.
            # The transcoded output is pushed to rtmp://localhost:1935/hls, which stores the video.
            # Note: the exec directive must end with a ';' on its last uncommented line.
            exec ffmpeg -i rtmp://localhost:1935/stream/$name
              -c:a libfdk_aac -b:a 128k -c:v libx264 -b:v 2500k -f flv -g 30 -r 30 -s 1280x720 -preset superfast -profile:v baseline rtmp://localhost:1935/hls/$name_720p2628kbs;
              #-c:a libfdk_aac -b:a 128k -c:v libx264 -b:v 1000k -f flv -g 30 -r 30 -s 854x480 -preset superfast -profile:v baseline rtmp://localhost:1935/hls/$name_480p1128kbs
              #-c:a libfdk_aac -b:a 128k -c:v libx264 -b:v 750k -f flv -g 30 -r 30 -s 640x360 -preset superfast -profile:v baseline rtmp://localhost:1935/hls/$name_360p878kbs
              #-c:a libfdk_aac -b:a 128k -c:v libx264 -b:v 400k -f flv -g 30 -r 30 -s 426x240 -preset superfast -profile:v baseline rtmp://localhost:1935/hls/$name_240p528kbs
              #-c:a libfdk_aac -b:a 64k -c:v libx264 -b:v 200k -f flv -g 15 -r 15 -s 426x240 -preset superfast -profile:v baseline rtmp://localhost:1935/hls/$name_240p264kbs;
        }

        application hls {
            live on;
            hls on;
            hls_fragment_naming system;
            hls_fragment 5;
            hls_playlist_length 10;
            # The pushed stream is saved under /opt/data/hls inside the container.
            hls_path /opt/data/hls;
            hls_nested on;

            hls_variant _720p2628kbs BANDWIDTH=2628000,RESOLUTION=1280x720;
            #hls_variant _480p1128kbs BANDWIDTH=1128000,RESOLUTION=854x480;
            #hls_variant _360p878kbs BANDWIDTH=878000,RESOLUTION=640x360;
            #hls_variant _240p528kbs BANDWIDTH=528000,RESOLUTION=426x240;
            #hls_variant _240p264kbs BANDWIDTH=264000,RESOLUTION=426x240;
        }
    }
}

http {
    root /www/static;
    sendfile off;
    tcp_nopush on;
    server_tokens off;
    access_log /dev/stdout combined;

    # Uncomment these lines to enable SSL.
    # ssl_ciphers HIGH:!aNULL:!MD5;
    # ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
    # ssl_session_cache shared:SSL:10m;
    # ssl_session_timeout 10m;

    server {
        listen 80;

        # Uncomment these lines to enable SSL.
        # Update the ssl paths with your own certificate and private key.
        # listen 443 ssl;
        # ssl_certificate /opt/certs/example.com.crt;
        # ssl_certificate_key /opt/certs/example.com.key;

        location /hls {
            types {
                application/vnd.apple.mpegurl m3u8;
                video/mp2t ts;
            }
            root /opt/data;
            add_header Cache-Control no-cache;
            add_header Access-Control-Allow-Origin *;
        }

        # The frontend plays video through this location.
        # Example: http://localhost/live/test.m3u8 is served from the test.m3u8 file under /opt/data/hls.
        location /live {
            alias /opt/data/hls;
            types {
                application/vnd.apple.mpegurl m3u8;
                video/mp2t ts;
            }
            add_header Cache-Control no-cache;
            add_header Access-Control-Allow-Origin *;
        }

        location /stat {
            rtmp_stat all;
            rtmp_stat_stylesheet stat.xsl;
        }

        location /stat.xsl {
            root /www/static;
        }

        location /crossdomain.xml {
            default_type text/xml;
            expires 24h;
        }
    }
}
# Create the working directories under /root
mkdir -p /root/nginx-rtmp/data
mkdir -p /root/nginx-rtmp/conf   # copy the configuration file above into conf so it can be mounted

# Run the image
# - 1935 is the default RTMP port used for pushing the stream
# - the configuration file is mounted so it can be edited on the host
# - nginx writes the pushed stream's HLS output to /opt/data/hls, so that directory is mounted for easy inspection
docker run -d --name nginx-rtmp \
  -p 1935:1935 -p 80:80 \
  -v /root/nginx-rtmp/conf/nginx.conf:/etc/nginx/nginx.conf.template \
  -v /root/nginx-rtmp/data:/opt/data/hls \
  alfg/nginx-rtmp
Example
Example of the data that the pushed video stream leaves behind in nginx. These files were produced by pushing from Java; the Java push demo is shown below.
Layout of the data stored inside the test_XXX folders:
The segment file names are millisecond timestamps; new segments replace old ones, so the directory does not keep growing. The frontend requests test.m3u8 to play these .ts segment files.
Java example
pom dependency
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv-platform</artifactId>
    <version>1.5.1</version>
</dependency>
Code example
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.FrameRecorder;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

@Slf4j
public class ConvertVideoPakcet {

    private static final Map<String, ConvertVideoPakcet> convertVideoPakcets = new HashMap<>();

    private FFmpegFrameGrabber grabber = null;
    private FFmpegFrameRecorder record = null;
    private int width = -1, height = -1;

    // video parameters
    private int audiocodecid;
    private int codecid;
    private double framerate; // frame rate
    private int bitrate;      // bit rate

    // audio parameters
    private int audioChannels;
    private int audioBitrate;
    private int sampleRate;

    // controls the relay loop; volatile so the pushing thread sees stop()
    private volatile boolean flag = true;

    private static ConvertVideoPakcet get(String deviceId) {
        return convertVideoPakcets.get(deviceId);
    }
    public static Boolean start(String deviceId, String fromUrl, String toUrl) {
        if (null != get(deviceId)) return true;
        final ConvertVideoPakcet convertVideoPakcet = new ConvertVideoPakcet();
        convertVideoPakcets.put(deviceId, convertVideoPakcet);
        new Thread(() -> {
            log.info("start device");
            try {
                convertVideoPakcet.rtsp(fromUrl).rtmp(toUrl).start();
            } catch (IOException e) {
                log.error("start device error", e);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }).start();
        log.info("start device finish!");
        return true;
    }
    /**
     * Stop the given live relay.
     * @param id device id
     * @return whether the relay was found and stopped
     */
    public static Boolean stop(String id) {
        log.info("stop device, {}", id);
        ConvertVideoPakcet convertVideoPakcet = get(id);
        if (null != convertVideoPakcet) {
            convertVideoPakcets.remove(id);
            return convertVideoPakcet.stop();
        }
        return false;
    }
    /**
     * Pull from a camera or another RTSP/RTMP video source.
     *
     * @param src the RTSP source address
     * @author JW
     * @throws Exception
     */
    private ConvertVideoPakcet rtsp(String src) throws Exception {
        // grabber that pulls the source stream
        grabber = new FFmpegFrameGrabber(src);
        grabber.setOption("rtsp_transport", "tcp");
        grabber.start(); // after start() ffmpeg has probed the source, so the audio/video parameters can be read
        if (width < 0 || height < 0) {
            width = grabber.getImageWidth();
            height = grabber.getImageHeight();
        }
        // video parameters
        audiocodecid = grabber.getAudioCodec();
        log.warn("audio codec: {}", audiocodecid);
        codecid = grabber.getVideoCodec();
        framerate = grabber.getVideoFrameRate(); // frame rate
        bitrate = grabber.getVideoBitrate();     // bit rate
        // audio parameters
        // to record audio, all three must be positive: audioChannels > 0 && audioBitrate > 0 && sampleRate > 0
        audioChannels = grabber.getAudioChannels();
        audioBitrate = grabber.getAudioBitrate();
        sampleRate = grabber.getSampleRate();
        if (audioBitrate < 1) {
            audioBitrate = 128 * 1000; // default audio bit rate
        }
        return this;
    }
    /**
     * Push the stream out over RTMP to the nginx media server.
     *
     * @param out the RTMP address of the media server
     * @author JW
     * @throws IOException
     */
    private ConvertVideoPakcet rtmp(String out) throws IOException {
        // recorder that pushes the stream
        record = new FFmpegFrameRecorder(out, width, height);
        record.setVideoOption("crf", "30");
        record.setGopSize(2);
        record.setFrameRate(framerate);
        record.setVideoBitrate(bitrate);
        record.setAudioChannels(audioChannels);
        record.setAudioBitrate(audioBitrate);
        record.setSampleRate(sampleRate);
        AVFormatContext fc = null;
        // if (out.indexOf("rtmp") >= 0 || out.indexOf("flv") > 0) {
        // FLV container
        record.setFormat("flv");
        record.setAudioCodecName("aac");
        record.setVideoCodec(codecid);
        fc = grabber.getFormatContext();
        // }
        record.start(fc);
        return this;
    }
    /**
     * Remux (copy packets without re-encoding).
     *
     * @author eguid
     * @throws IOException
     */
    private void start() throws IOException {
        // flush any data buffered during start-up probing
        if (null != grabber)
            grabber.flush();
        while (flag) {
            AVPacket pkt = null;
            try {
                // grab an undecoded audio/video packet
                pkt = grabber.grabPacket();
                if (pkt == null || pkt.size() <= 0 || pkt.data() == null) {
                    continue;
                }
                // push the packet out as-is; no re-encoding is needed
                record.recordPacket(pkt);
                avcodec.av_packet_unref(pkt);
                try {
                    Thread.sleep(0, 1000); // brief pause (1 microsecond) to avoid a tight busy loop
                } catch (InterruptedException e) {
                    log.error("error while waiting between packets", e);
                }
            } catch (Exception e) {
                log.error("error while pushing the stream", e);
            }
        }
    }
    private Boolean stop() {
        // make the relay loop exit
        flag = false;
        if (null != record) {
            try {
                record.release();
            } catch (FrameRecorder.Exception e) {
                log.error("stop record error", e);
                return false;
            }
        }
        if (null != grabber) {
            try {
                grabber.release();
            } catch (Exception e) {
                log.error("stop grabber error", e);
                return false;
            }
        }
        return true;
    }
    public static void main(String[] args) throws Exception {
        // run it: set the source address to pull from...
        new ConvertVideoPakcet().rtsp("rtmp://media3.scctv.net/live/scctv_800")
                // ...and the nginx address to push to
                .rtmp("rtmp://localhost:1935/stream/test").start();
    }
}
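For reference, here is a minimal sketch of how a caller might drive the static start/stop API above instead of running main() directly. The device id and both URLs are placeholders rather than values from this article; only the push address follows the nginx-rtmp config shown earlier.

public class LiveDemo {
    public static void main(String[] args) throws Exception {
        // Start relaying a (placeholder) camera feed to the nginx-rtmp "stream" application.
        ConvertVideoPakcet.start("device-1",
                "rtsp://192.168.1.10:554/stream1",        // placeholder RTSP source
                "rtmp://localhost:1935/stream/device-1"); // push address of the "stream" application above
        // Let the relay run for a minute, then stop it and release the grabber/recorder.
        Thread.sleep(60_000);
        ConvertVideoPakcet.stop("device-1");
    }
}

With this push address, the frontend would play http://localhost/live/device-1.m3u8, following the same pattern as test.m3u8 above.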
Frontend example (Vue)

# Install the dependency
npm install --save hls.js

Component example:

<template>
  <video ref="videoRef" width="400" controls></video>
</template>

<script>
import Hls from 'hls.js';

export default {
  mounted: function() {
    const hls = new Hls();
    hls.loadSource('http://localhost/live/test.m3u8');
    hls.attachMedia(this.$refs.videoRef);
    // use an arrow function so that `this` still refers to the component instance
    hls.on(Hls.Events.MANIFEST_PARSED, () => {
      this.$refs.videoRef.play();
    });
  }
}
</script>
By the way, nginx is not strictly required: you could also slice the stream on the backend and push it to the frontend over a long-lived WebSocket connection.
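As a rough illustration of that alternative (not part of the setup above): the backend could keep WebSocket sessions open and broadcast each freshly produced media segment to them. The sketch below uses the standard javax.websocket (JSR-356) API; the endpoint path, the class name LivePushEndpoint, and how the segment bytes are produced are assumptions for illustration only.

import javax.websocket.OnClose;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

// Hypothetical sketch: push media segments to browsers over WebSocket instead of serving HLS from nginx.
@ServerEndpoint("/live/ws")
public class LivePushEndpoint {

    private static final Set<Session> sessions = new CopyOnWriteArraySet<>();

    @OnOpen
    public void onOpen(Session session) {
        sessions.add(session);
    }

    @OnClose
    public void onClose(Session session) {
        sessions.remove(session);
    }

    // Called by the backend each time a new segment has been sliced; producing the bytes is left to the caller.
    public static void broadcast(byte[] segment) {
        for (Session session : sessions) {
            try {
                session.getBasicRemote().sendBinary(ByteBuffer.wrap(segment));
            } catch (IOException e) {
                sessions.remove(session);
            }
        }
    }
}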