Ubuntu 16.04: Installing FFmpeg and Parsing a Video Stream

This article describes how to install FFmpeg on Ubuntu 16.04 and how to call the FFmpeg libraries from C++ to parse an RTSP video stream and save the decoded frames as PPM images. First, FFmpeg is installed by adding a PPA, updating the package lists, and installing the package. Then, a C++ program is shown that reads an RTSP stream, decodes it, and stores the frames as images. Finally, the dependencies and the steps needed to compile and run the code are listed.

I. Installing FFmpeg

Step 1: Add the PPA source.

sudo add-apt-repository ppa:djcj/hybrid

Step 2: Update the package lists.

sudo apt-get update

Step 3: Install FFmpeg.

sudo apt-get install ffmpeg

Step 4: Verify the installation.

ffmpeg -version
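This prints the FFmpeg version banner and build configuration. Since the RTSP example below decodes H.264, it is also worth confirming that this build ships an H.264 decoder:

ffmpeg -codecs | grep h264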

II. Parsing an RTSP Video Stream with the FFmpeg Libraries in C++ and Saving Frames as PPM Images

1. Reference:

https://stackoverflow.com/questions/10715170/receiving-rtsp-stream-using-ffmpeg-library

2. Code: my_streamer.cpp

#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <fstream>
#include <sstream>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/avutil.h>
}

int main(int argc, char** argv) {
    // Open the initial context variables that are needed
    SwsContext *img_convert_ctx;
    AVFormatContext* format_ctx = avformat_alloc_context();
    AVCodecContext* codec_ctx = NULL;
    int video_stream_index;

    // Register everything
    av_register_all();
    avformat_network_init();

    // Open the RTSP input
    if (avformat_open_input(&format_ctx, "rtsp://134.169.178.187:8554/h264.3gp",
            NULL, NULL) != 0) {
        return EXIT_FAILURE;
    }
    if (avformat_find_stream_info(format_ctx, NULL) < 0) {
        return EXIT_FAILURE;
    }

    // Search for the video stream
    for (int i = 0; i < format_ctx->nb_streams; i++) {
        if (format_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            video_stream_index = i;
    }

    AVPacket packet;
    av_init_packet(&packet);

    // Output context that the video packets are copied into
    AVFormatContext* output_ctx = avformat_alloc_context();
    AVStream* stream = NULL;
    int cnt = 0;

    // Start reading packets from the stream
    av_read_play(format_ctx);    // play RTSP

    // Get the codec
    AVCodec *codec = NULL;
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        exit(1);
    }

    // Allocate the decoder context and copy the stream parameters into it
    codec_ctx = avcodec_alloc_context3(codec);
    avcodec_get_context_defaults3(codec_ctx, codec);
    avcodec_copy_context(codec_ctx, format_ctx->streams[video_stream_index]->codec);

    std::ofstream output_file;
    if (avcodec_open2(codec_ctx, codec, NULL) < 0)
        exit(1);

    // Scaler that converts decoded frames to RGB24 for the PPM output
    img_convert_ctx = sws_getContext(codec_ctx->width, codec_ctx->height,
            codec_ctx->pix_fmt, codec_ctx->width, codec_ctx->height, AV_PIX_FMT_RGB24,
            SWS_BICUBIC, NULL, NULL, NULL);

    int size = avpicture_get_size(AV_PIX_FMT_YUV420P, codec_ctx->width,
            codec_ctx->height);
    uint8_t* picture_buffer = (uint8_t*) (av_malloc(size));
    AVFrame* picture = av_frame_alloc();
    AVFrame* picture_rgb = av_frame_alloc();
    int size2 = avpicture_get_size(AV_PIX_FMT_RGB24, codec_ctx->width,
            codec_ctx->height);
    uint8_t* picture_buffer_2 = (uint8_t*) (av_malloc(size2));
    avpicture_fill((AVPicture *) picture, picture_buffer, AV_PIX_FMT_YUV420P,
            codec_ctx->width, codec_ctx->height);
    avpicture_fill((AVPicture *) picture_rgb, picture_buffer_2, AV_PIX_FMT_RGB24,
            codec_ctx->width, codec_ctx->height);

    while (av_read_frame(format_ctx, &packet) >= 0 && cnt < 1000) { // read ~1000 frames
        std::cout << "1 Frame: " << cnt << std::endl;
        if (packet.stream_index == video_stream_index) {    // packet is video
            std::cout << "2 Is Video" << std::endl;
            if (stream == NULL) {    // create stream in file
                std::cout << "3 create stream" << std::endl;
                stream = avformat_new_stream(output_ctx,
                        format_ctx->streams[video_stream_index]->codec->codec);
                avcodec_copy_context(stream->codec,
                        format_ctx->streams[video_stream_index]->codec);
                stream->sample_aspect_ratio =
                        format_ctx->streams[video_stream_index]->codec->sample_aspect_ratio;
            }
            int check = 0;
            packet.stream_index = stream->id;
            std::cout << "4 decoding" << std::endl;
            int result = avcodec_decode_video2(codec_ctx, picture, &check, &packet);
            std::cout << "Bytes decoded " << result << " check " << check << std::endl;
            if (cnt > 100) {    // skip the first 100 frames, then save each decoded frame
                // Convert the frame to RGB24 and write it as an ASCII (P3) PPM file
                sws_scale(img_convert_ctx, picture->data, picture->linesize, 0,
                        codec_ctx->height, picture_rgb->data, picture_rgb->linesize);
                std::stringstream file_name;
                file_name << "test" << cnt << ".ppm";
                output_file.open(file_name.str().c_str());
                output_file << "P3 " << codec_ctx->width << " " << codec_ctx->height
                        << " 255\n";
                for (int y = 0; y < codec_ctx->height; y++) {
                    for (int x = 0; x < codec_ctx->width * 3; x++)
                        output_file << (int) (picture_rgb->data[0]
                                + y * picture_rgb->linesize[0])[x] << " ";
                }
                output_file.close();
            }
            cnt++;
        }
        av_free_packet(&packet);
        av_init_packet(&packet);
    }

    av_free(picture);
    av_free(picture_rgb);
    av_free(picture_buffer);
    av_free(picture_buffer_2);

    av_read_pause(format_ctx);
    avio_close(output_ctx->pb);
    avformat_free_context(output_ctx);
    return (EXIT_SUCCESS);
}
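Note that this listing relies on APIs that have since been deprecated (av_register_all, avcodec_decode_video2, the avpicture_* helpers, and the streams[i]->codec field). They still build and run against the FFmpeg versions typically found on Ubuntu 16.04, which is what this article targets, but newer releases replace the decode call with the send/receive pair. A minimal sketch of that replacement, assuming FFmpeg 3.1 or later, with decode_packet being a hypothetical helper name; only the decoding step changes, the sws_scale/PPM code above stays the same:

extern "C" {
#include <libavcodec/avcodec.h>
}

// Feed one packet to the decoder and drain every frame it produces.
// Returns 0 on success or a negative AVERROR code on failure.
static int decode_packet(AVCodecContext* codec_ctx, const AVPacket* packet, AVFrame* frame) {
    int ret = avcodec_send_packet(codec_ctx, packet);
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_frame(codec_ctx, frame)) == 0) {
        // frame now holds one decoded picture; the sws_scale() /
        // PPM-writing block from the listing above would run here.
    }
    // EAGAIN (decoder wants more input) and EOF (fully drained) are not errors.
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}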

3. Installing the dependencies

sudo apt-get install libavformat-dev

sudo apt-get install libavcodec-dev

sudo apt-get install libswresample-dev

sudo apt-get install libswscale-dev

sudo apt-get install libavutil-dev

sudo apt-get install libsdl1.2-dev

sudo apt-get install libsdl-image1.2-dev

sudo apt-get install libsdl-mixer1.2-dev

sudo apt-get install libsdl-ttf2.0-dev

sudo apt-get install libsdl-gfx1.2-dev
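A quick way to confirm that the development packages are visible to the build (assuming their pkg-config files landed on the default search path):

pkg-config --modversion libavformat libavcodec libswscale libavutil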

4. Compiling

g++ -w my_streamer.cpp -o my_streamer $(pkg-config --cflags --libs libavformat libswscale libavcodec libavutil)
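Should pkg-config fail to locate the FFmpeg .pc files, linking against the libraries explicitly is a reasonable fallback (the same libraries, just named by hand):

g++ -w my_streamer.cpp -o my_streamer -lavformat -lavcodec -lswscale -lavutil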

5. Running

./my_streamer
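The RTSP URL is hard-coded in main(); a small, hypothetical modification reads it from the command line instead, falling back to the URL used in the listing:

// Hypothetical change near the top of main(): take the URL from argv[1] if given.
const char* url = (argc > 1) ? argv[1] : "rtsp://134.169.178.187:8554/h264.3gp";
if (avformat_open_input(&format_ctx, url, NULL, NULL) != 0) {
    return EXIT_FAILURE;
}

After recompiling, the program can then be pointed at any camera or server, e.g. ./my_streamer rtsp://<ip>:<port>/<path>.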

III. Playing a Local Video File by Calling the FFmpeg Libraries from C

1. Reference: https://github.com/mpenkov/ffmpeg-tutorial

2. Dependencies: see Section II.3 above.

3. Compiling:

git clone https://github.com/mpenkov/ffmpeg-tutorial.git

cd ffmpeg-tutorial

make

4. Running:

./tutorial01.out your_video.avi
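If no local video file is at hand, FFmpeg itself can generate a short synthetic test clip with its built-in testsrc source (the duration, resolution, and frame rate below are arbitrary):

ffmpeg -f lavfi -i testsrc=duration=10:size=640x480:rate=25 test.avi

./tutorial01.out test.avi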
