Packaging a Linux C++ program as a .so for Java: calling a C++ SO library from Java on Linux (CentOS) to implement video recording, with the SO library wrapping FFmpeg

1. First, obtain FFmpeg and build it

1.1 Get the source:

git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg

Change into the ffmpeg directory:

cd ffmpeg

1.2 The three-step build:

./configure --extra-cflags="-O3 -fPIC" --enable-pic --disable-static --enable-shared

make -j4

make install

Note the -fPIC and --enable-pic options above. I did not pay attention to them at first: calling the .so from C++ worked fine, but Java flat-out refused to load it and could not find the FFmpeg functions.
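A quick way to catch this without Java in the loop is to force-resolve every symbol the way the JVM does, using dlopen with RTLD_NOW. The checker below is only a minimal sketch (the file name pic_check.cpp and the ./libmux.so path are placeholders; the wrapper library itself is built in section 2.8). Build it with g++ pic_check.cpp -ldl -o pic_check:

// pic_check.cpp - minimal sketch: load the wrapper library and resolve its symbols eagerly,
// which is where a non-PIC FFmpeg build shows up as relocation/unresolved-symbol errors.
#include <dlfcn.h>
#include <cstdio>

int main()
{
    void* handle = dlopen("./libmux.so", RTLD_NOW | RTLD_GLOBAL);
    if (!handle)
    {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }
    if (!dlsym(handle, "startRecordVideo") || !dlsym(handle, "stopRecordVideo"))
    {
        fprintf(stderr, "dlsym failed: %s\n", dlerror());
        dlclose(handle);
        return 1;
    }
    printf("libmux.so loads and exports the recording functions\n");
    dlclose(handle);
    return 0;
}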

2. Wrapping the FFmpeg recording functions in C++

Straight to the code. It has not been tidied up much and is a bit messy; the debugging parts can be removed. The RtspStreamMuxTask code is reproduced from https://blog.csdn.net/toshiba689/article/details/79426680, so use it and cherish it.

2.1 mux.h

#pragma once

extern "C" {

// Start recording

// Parameters: videoSrcAddr - video source address, filePath - path of the recording file

// Returns a recording handle, or -1 on error

int startRecordVideo(char* videoSrcAddr, char* filePath);

// Stop recording

// Parameter: index - recording handle

// Returns the handle that was passed in, or -1 on error

int stopRecordVideo(int index);

}
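For reference, a native caller of this interface needs just the two calls. The sketch below uses placeholder URL and path values and is roughly what the main.cpp commented out in the Makefile (section 2.8) would look like:

// main.cpp - minimal sketch of a C++ caller (URL and output path are placeholders)
#include "mux.h"
#include <unistd.h>   // sleep
#include <cstdio>

int main()
{
    char url[]  = "rtsp://127.0.0.1:8554/channel=0";
    char path[] = "/tmp/test.mp4";

    int handle = startRecordVideo(url, path);
    if (handle < 0)
    {
        printf("startRecordVideo failed\n");
        return 1;
    }
    sleep(10);                                  // record for about 10 seconds
    return stopRecordVideo(handle) == handle ? 0 : 1;
}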

2.2 mux.cpp

#include "mux.h"

#include "RtspStreamMuxTask.h"

#include "TickCounter.h"

#include <map>

#include <memory>

#include <cstdio>

using namespace std;

#include "dlog2.h"

std::map<int, std::shared_ptr<RtspStreamMuxTask>> muxTasks;

static volatile int muxTaskNum = 0;

// Start recording

int startRecordVideo(char* videoSrcAddr, char* filePath)

{

Mcgi_log( "%s %s", videoSrcAddr, filePath);

TickCounter tc(__FUNCTION__);

#if 1

std::shared_ptr<RtspStreamMuxTask> mux(new RtspStreamMuxTask);

//RtspStreamMuxTask mux;

mux.get()->SetInputUrl(videoSrcAddr);

mux.get()->SetOutputPath(filePath);

mux.get()->StartRecvStream();

muxTasks.insert(make_pair(muxTaskNum, mux));

#else

RtspStreamMuxTask* mux = new RtspStreamMuxTask;

mux->SetInputUrl(videoSrcAddr);

mux->SetOutputPath(filePath);

mux->StartRecvStream();

muxTasks.insert(make_pair(muxTaskNum++, mux));

#endif

printf("**** video record index: %d\n", muxTaskNum);

Mcgi_log( "%d %s %s", muxTaskNum, videoSrcAddr, filePath);

return muxTaskNum++;

}

// Stop recording

int stopRecordVideo(int index)

{

TickCounter tc(__FUNCTION__);

printf("111111111 %d\n", muxTasks.count(index));

#if 1

if (muxTasks.count(index))

{

printf("2222222222\n");

muxTasks[index].get()->StopRecvStream();

muxTasks.erase(index);

printf("**** video record count: %d\n", muxTasks.size());

return index;

}

#else

if (muxTasks.count(index) > 0)

{

muxTasks[index]->StopRecvStream();

delete muxTasks[index];

muxTasks.erase(index);

printf("**** video record count: %d\n", muxTasks.size());

return index;

}

#endif

return -1;

}

2.3 RtspStreamMuxTask.h

#ifndef RtspStreamMuxTask_H

#define RtspStreamMuxTask_H

#include <cstdio>

#include <string>

#include <iostream>

#include <exception>

#include <pthread.h>

#include <unistd.h>

using namespace std;

#ifdef __cplusplus

extern "C"

{

#endif

#include "libavcodec/avcodec.h"

#include "libavformat/avformat.h"

#include "libavfilter/avfilter.h"

#ifdef __cplusplus

};

#endif

class RtspStreamMuxTask

{

public:

RtspStreamMuxTask();

virtual ~RtspStreamMuxTask();

void SetInputUrl(string rtspUrl);

void SetOutputPath(string outputPath);

void StartRecvStream();

void StopRecvStream();

private:

void run();

int OpenInputStream();

void CloseInputStream();

void readAndMux();

static void* ReadingThrd(void * pParam);

int openOutputStream();

void closeOutputStream();

void ReleaseCodecs();

void GetVideoSize(long & width, long & height) // get the video resolution

{

width = coded_width;

height = coded_height;

}

private:

string m_inputUrl;

string m_outputFile;

AVFormatContext* m_inputAVFormatCxt;

AVBitStreamFilterContext* m_bsfcAAC;

AVBitStreamFilterContext* m_bsfcH264;

int m_videoStreamIndex;

int m_audioStreamIndex;

AVFormatContext* m_outputAVFormatCxt;

char m_tmpErrString[64];

volatile bool m_stop_status;

pthread_t m_hReadThread;

bool m_bInputInited;

bool m_bOutputInited;

int coded_width, coded_height;

int m_frame_rate;

};

#endif // RtspStreamMuxTask_H

// https://blog.csdn.net/toshiba689/article/details/79426680
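
If the class is driven directly instead of through the mux.cpp wrapper, one instance handles one recording session. A minimal sketch with placeholder URL and path:

// Minimal sketch of using RtspStreamMuxTask directly (URL and path are placeholders)
#include "RtspStreamMuxTask.h"
#include <unistd.h>   // sleep

int main()
{
    RtspStreamMuxTask task;
    task.SetInputUrl("rtsp://127.0.0.1:8554/channel=0");
    task.SetOutputPath("/tmp/direct.mp4");
    task.StartRecvStream();   // spawns the reader/mux thread
    sleep(10);                // record for a while
    task.StopRecvStream();    // joins the thread and closes the streams
    return 0;
}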

2.4 RtspStreamMuxTask.cpp

#include "RtspStreamMuxTask.h"

#include <cstdio>

#include "TickCounter.h"

#include "dlog2.h"

//

RtspStreamMuxTask::RtspStreamMuxTask()

{

//dLog( char* dIfomSource, char* dIfomLevel, char* dFmt, ... );

Mcgi_log( "%s", __FUNCTION__ );

m_stop_status = false;

m_inputAVFormatCxt = NULL;

m_bsfcAAC = NULL;

m_bsfcH264 = NULL;

m_videoStreamIndex = -1;

m_audioStreamIndex = -1;

m_outputAVFormatCxt = NULL;

m_hReadThread = NULL;

m_bInputInited = false;

m_bOutputInited = false;

coded_width = coded_height = 0;

m_frame_rate = 25;

}

RtspStreamMuxTask::~RtspStreamMuxTask()

{

StopRecvStream();

}

void RtspStreamMuxTask::SetInputUrl(string rtspUrl)

{

Mcgi_log( "%s", __FUNCTION__ );

m_inputUrl = rtspUrl;

}

void RtspStreamMuxTask::SetOutputPath(string outputPath)

{

Mcgi_log( "%s", __FUNCTION__ );

m_outputFile = outputPath;

}

void RtspStreamMuxTask::StartRecvStream()

{

Mcgi_log( "%s", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

if(m_inputUrl.empty())

return;

m_videoStreamIndex = -1;

m_audioStreamIndex = -1;

m_bInputInited = false;

m_bOutputInited = false;

coded_width = coded_height = 0;

int err = pthread_create(&m_hReadThread, NULL, ReadingThrd, this);

//pthread_detach(m_hReadThread);

//sleep(1);

}

void RtspStreamMuxTask::StopRecvStream()

{

TickCounter tc(__FUNCTION__);

m_stop_status = true;

printf("333333333 m_stop_status: %d\n", m_stop_status);

//sleep(1);

if (m_hReadThread != NULL)

{

//WaitForSingleObject(m_hReadThread, INFINITE);

void* rval1;

pthread_join(m_hReadThread, &rval1);

//CloseHandle(m_hReadThread);

m_hReadThread = NULL;

}

CloseInputStream();

}

void* RtspStreamMuxTask::ReadingThrd(void * pParam)

{

Mcgi_log( "%s", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

RtspStreamMuxTask * pTask = (RtspStreamMuxTask *) pParam;

pTask->run();

return 0;

}

void RtspStreamMuxTask::run()

{

Mcgi_log( "%s", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

try

{

//m_stop_status = false;

OpenInputStream();

openOutputStream();

//m_stop_status = false;

readAndMux();

CloseInputStream();

closeOutputStream();

}

catch(std::exception& e)

{

printf("%s \n", e.what());

CloseInputStream();

}

}

int RtspStreamMuxTask::OpenInputStream()

{

Mcgi_log( "%s start", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

if (m_inputAVFormatCxt)

{

string strError = ("already has input avformat");

printf("%s \n", strError.c_str());

Mcgi_log( "%s %s", __FUNCTION__, strError.c_str());

return -1;

}

Mcgi_log( "%s 1", __FUNCTION__ );

m_bsfcAAC = av_bitstream_filter_init("aac_adtstoasc");

if(!m_bsfcAAC)

{

string strError = ("can not create aac_adtstoasc filter");

printf("%s \n", strError.c_str());

Mcgi_log( "%s %s", __FUNCTION__, strError.c_str());

return -1;

}

Mcgi_log( "%s 2", __FUNCTION__ );

m_bsfcH264 = av_bitstream_filter_init("h264_mp4toannexb");

if(!m_bsfcH264)

{

string strError = ("can not create h264_mp4toannexb filter");

printf("%s \n", strError.c_str());

Mcgi_log( "%s %s", __FUNCTION__, strError.c_str());

return -1;

}

///

Mcgi_log( "%s 3", __FUNCTION__ );

int res = 0;

AVDictionary* options = NULL;

//av_dict_set(&options, "bufsize", "4096000", 0);

av_dict_set(&options, "rtsp_transport", "tcp", 0); // use TCP transport

res = avformat_open_input(&m_inputAVFormatCxt, m_inputUrl.c_str(), 0, &options);

Mcgi_log( "%s 4", __FUNCTION__ );

if(res < 0)

{

string strError = ("can not open file:" + m_inputUrl + ",errcode:" + to_string(res) + ",err msg:" + av_make_error_string(m_tmpErrString, AV_ERROR_MAX_STRING_SIZE, res));

printf("%s \n", strError.c_str());

Mcgi_log( "%s %s", __FUNCTION__, strError.c_str());

return -1;

}

Mcgi_log( "%s 5", __FUNCTION__ );

if (avformat_find_stream_info(m_inputAVFormatCxt, 0) < 0)

{

string strError = ("can not find stream info");

printf("%s \n", strError.c_str());

Mcgi_log( "%s %s", __FUNCTION__, strError.c_str());

return -1;

}

Mcgi_log( "%s 6", __FUNCTION__ );

av_dump_format(m_inputAVFormatCxt, 0, m_inputUrl.c_str(), 0);

Mcgi_log( "%s 7", __FUNCTION__ );

for (int i = 0; i < m_inputAVFormatCxt->nb_streams; i++)

{

AVStream *in_stream = m_inputAVFormatCxt->streams[i];

printf("codec id: %d, URL: %s \n", in_stream->codec->codec_id, m_inputUrl.c_str());

if (in_stream->codec->codec_type == AVMEDIA_TYPE_VIDEO)

{

m_videoStreamIndex = i;

coded_width = in_stream->codec->width;

coded_height = in_stream->codec->height;

if(in_stream->avg_frame_rate.den != 0 && in_stream->avg_frame_rate.num != 0)

{

m_frame_rate = in_stream->avg_frame_rate.num/in_stream->avg_frame_rate.den; // frames per second

}

printf("video stream index: %d, width: %d, height: %d, FrameRate: %d\n", m_videoStreamIndex, in_stream->codec->width, in_stream->codec->height, m_frame_rate);

}

else if (in_stream->codec->codec_type == AVMEDIA_TYPE_AUDIO)

{

m_audioStreamIndex = i;

}

}

m_bInputInited = true;

Mcgi_log( "%s end", __FUNCTION__ );

return 0;

}

void RtspStreamMuxTask::CloseInputStream()

{

TickCounter tc(__FUNCTION__);

if (m_inputAVFormatCxt)

{

avformat_close_input(&m_inputAVFormatCxt);

}

if(m_bsfcAAC)

{

av_bitstream_filter_close(m_bsfcAAC);

m_bsfcAAC = NULL;

}

if(m_bsfcH264)

{

av_bitstream_filter_close(m_bsfcH264);

m_bsfcH264 = NULL;

}

m_bInputInited = false;

}

int RtspStreamMuxTask::openOutputStream()

{

Mcgi_log( "%s start", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

if (m_outputAVFormatCxt)

{

printf("already has rtmp avformat \n");

return -1;

}

int res = 0;

if(!m_outputFile.empty())

{

res = avformat_alloc_output_context2(&m_outputAVFormatCxt, NULL, NULL, m_outputFile.c_str());

if (m_outputAVFormatCxt == NULL)

{

printf("can not alloc output context \n");

return -1;

}

AVOutputFormat* fmt = m_outputAVFormatCxt->oformat;

//fmt->audio_codec = AV_CODEC_ID_AAC;

//fmt->video_codec = AV_CODEC_ID_H264;

for (int i = 0; i < m_inputAVFormatCxt->nb_streams; i++)

{

AVStream *in_stream = m_inputAVFormatCxt->streams[i];

AVStream *out_stream = avformat_new_stream(m_outputAVFormatCxt, in_stream->codec->codec);

if (!out_stream)

{

printf("can not new out stream");

return -1;

}

res = avcodec_copy_context(out_stream->codec, in_stream->codec);

if (res < 0)

{

string strError = "can not copy context, url: " + m_inputUrl + ",errcode:" + to_string(res) + ",err msg:" + av_make_error_string(m_tmpErrString, AV_ERROR_MAX_STRING_SIZE, res);

printf("%s \n", strError.c_str());

return -1;

}

#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22)

#define CODEC_FLAG_GLOBAL_HEADER AV_CODEC_FLAG_GLOBAL_HEADER

//#define AVFMT_RAWPICTURE 0x0020

if (m_outputAVFormatCxt->oformat->flags & AVFMT_GLOBALHEADER)

{

out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

}

}

av_dump_format(m_outputAVFormatCxt, 0, m_outputFile.c_str(), 1);

if (!(fmt->flags & AVFMT_NOFILE))

{

res = avio_open(&m_outputAVFormatCxt->pb, m_outputFile.c_str(), AVIO_FLAG_WRITE);

if (res < 0)

{

string strError = "can not open output io, file:" + m_outputFile + ",errcode:" + to_string(res) + ", err msg:" + av_make_error_string(m_tmpErrString, AV_ERROR_MAX_STRING_SIZE, res);

printf("%s \n", strError.c_str());

Mcgi_log("%s", strError.c_str());

return -1;

}

}

res = avformat_write_header(m_outputAVFormatCxt, NULL);

if (res < 0)

{

string strError = "can not write outputstream header, URL:" + m_outputFile + ",errcode:" + to_string(res) + ", err msg:" + av_make_error_string(m_tmpErrString, AV_ERROR_MAX_STRING_SIZE, res);

printf("%s \n", strError.c_str());

m_bOutputInited = false;

return -1;

}

m_bOutputInited = true;

}

Mcgi_log("%s end", __FUNCTION__);

return 0;

}

void RtspStreamMuxTask::closeOutputStream()

{

TickCounter tc(__FUNCTION__);

if (m_outputAVFormatCxt)

{

if(m_bOutputInited)

{

int res = av_write_trailer(m_outputAVFormatCxt);

}

if (!(m_outputAVFormatCxt->oformat->flags & AVFMT_NOFILE))

{

if(m_outputAVFormatCxt->pb)

{

avio_close(m_outputAVFormatCxt->pb);

}

}

avformat_free_context(m_outputAVFormatCxt);

m_outputAVFormatCxt = NULL;

}

m_bOutputInited = false;

}

void RtspStreamMuxTask::readAndMux()

{

Mcgi_log( "%s start", __FUNCTION__ );

TickCounter tc(__FUNCTION__);

int nVideoFramesNum = 0;

int64_t first_pts_time = 0;

//DWORD start_time = GetTickCount();

AVPacket pkt;

av_init_packet(&pkt);

while(!m_stop_status)

{

int res;

res = av_read_frame(m_inputAVFormatCxt, &pkt);

if (res < 0) // read error or end of stream

{

if(AVERROR_EOF == res)

{

printf("End of file \n");

}

else

{

printf("av_read_frame() got error: %d \n", res);

}

break;

}

AVStream *in_stream = m_inputAVFormatCxt->streams[pkt.stream_index];

AVStream *out_stream = m_outputAVFormatCxt->streams[pkt.stream_index];

//pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));

//pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));

//pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);

pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);

pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);

pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);

pkt.pos = -1;

if(in_stream->codec->codec_type != AVMEDIA_TYPE_VIDEO && in_stream->codec->codec_type != AVMEDIA_TYPE_AUDIO)

{

av_free_packet(&pkt); // free packets from streams we do not mux, to avoid leaking them

continue;

}

if(in_stream->codec->codec_type == AVMEDIA_TYPE_VIDEO) // video

{

nVideoFramesNum++;

// write the compressed frame to the output format

int nError = av_interleaved_write_frame(m_outputAVFormatCxt, &pkt);

if (nError != 0)

{

char tmpErrString[AV_ERROR_MAX_STRING_SIZE] = {0};

av_make_error_string(tmpErrString, AV_ERROR_MAX_STRING_SIZE, nError);

printf("Error: %d while writing video frame, %s\n", nError, tmpErrString);

}

//int nSecs = pkt.pts*in_stream->time_base.num/in_stream->time_base.den;

//printf("Frame time: %02d:%02d \n", nSecs/60, nSecs%60);

}

else if(in_stream->codec->codec_type == AVMEDIA_TYPE_AUDIO) // audio

{

#if 0

// write the compressed frame to the output format

int nError = av_interleaved_write_frame(m_outputAVFormatCxt, &pkt);

if (nError != 0)

{

char tmpErrString[AV_ERROR_MAX_STRING_SIZE] = {0};

av_make_error_string(tmpErrString, AV_ERROR_MAX_STRING_SIZE, nError);

printf("Error: %d while writing audio frame, %s\n", nError, tmpErrString);

}

#endif

}

if((in_stream->codec->codec_type == AVMEDIA_TYPE_VIDEO) )

{

if(first_pts_time == 0)

first_pts_time = pkt.pts;

int64_t pts_time = (pkt.pts - first_pts_time)*1000*in_stream->time_base.num/in_stream->time_base.den; // convert to milliseconds

//int64_t now_time = GetTickCount() - start_time;

//if(pts_time > now_time + 10 && pts_time < now_time + 3000)

//{

// Sleep(pts_time-now_time);

//}

//else if(pts_time == 0 && nVideoFramesNum > 1)

//{

// Sleep(20);

//}

}

av_free_packet(&pkt);

static int frameId = 0;

frameId++;

if (frameId % 10 == 1)

printf("frameId: %d\n", frameId);

//TickCounter tc("while");

}//while

Mcgi_log( "%s end", __FUNCTION__ );

printf("Reading ended, read %d video frames \n", nVideoFramesNum);

}

// https://blog.csdn.net/toshiba689/article/details/79426680
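
One detail in readAndMux worth spelling out: the av_rescale_q_rnd/av_rescale_q calls convert each packet's pts, dts and duration from the input stream's time base to the output stream's time base. As an illustration only (the real time bases depend on the streams involved), a pts of 90000 ticks in a 1/90000 time base, i.e. one second, becomes 1000 ticks in a 1/1000 time base:

// Illustration only: how av_rescale_q maps a timestamp between two time bases.
extern "C" {
#include "libavutil/mathematics.h"
#include "libavutil/rational.h"
}
#include <cstdio>
#include <cstdint>

int main()
{
    AVRational in_tb  = {1, 90000};   // e.g. a 90 kHz RTP/RTSP time base
    AVRational out_tb = {1, 1000};    // e.g. a millisecond time base
    int64_t pts_in  = 90000;          // one second worth of input ticks
    int64_t pts_out = av_rescale_q(pts_in, in_tb, out_tb);
    printf("%lld -> %lld\n", (long long)pts_in, (long long)pts_out);   // prints 90000 -> 1000
    return 0;
}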

2.6 TickCounter.h

#pragma once

#include <string>

using namespace std;

class TickCounter

{

public:

TickCounter(string debug);

~TickCounter();

string debug;

};

2.7 TickCounter.cpp

#include "TickCounter.h"

#include <cstdio>

TickCounter::TickCounter(string debug)

{

printf("%s enter

this->debug = debug;

}

TickCounter::~TickCounter()

{

printf("%s leave ---->\n", debug.c_str());

}

2.8 Makefile

# Include paths

#INCLUDE = $(shell pkg-config --cflags opencv)

INCLUDE =

INCLUDE += -I.

# Libraries to link

LIBS =

#LIBS = $(shell pkg-config --libs opencv)

LIBS += -lpthread -lavformat -lavcodec -lavdevice -lavfilter -lavutil -lswresample

#-lm -lz

CFLAGS = -O3 -Wall -std=c++11

#-fPIC -shared

# Source files

SOURCES = RtspStreamMuxTask.cpp TickCounter.cpp dlog2.cpp mux.cpp

#main.cpp

# Object files and target (a shared library)

OBJECTS = $(SOURCES:.cpp=.o)

TARGET = libmux.so

$(TARGET):$(OBJECTS)

g++ -shared $(CFLAGS) $(OBJECTS) $(LIBS) -o $(TARGET)

#-fPIC

$(OBJECTS):$(SOURCES)

g++ -fPIC $(CFLAGS) -c $(SOURCES) $(LIBS)

#

clean:

rm $(OBJECTS) $(TARGET)

# Pattern rule: $@ is the target file, $< is the first prerequisite

%.o:%.cpp

g++ -fPIC $(CFLAGS) $(INCLUDE) -o $@ -c $<
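
After make, nm -D libmux.so should list startRecordVideo and stopRecordVideo among the exported dynamic symbols, and the dlopen checker sketched in section 1.2 can be pointed at the freshly built library before bringing Java into the picture.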

2.9 Java code

import com.sun.jna.Library;

import com.sun.jna.Native;

public class TestSo

{

public interface CMux extends Library {

// Load the .so on Linux. Note: write just "mux" here, not "libmux", and do not add the suffix

CMux INSTANCE = (CMux) Native.loadLibrary("mux", CMux.class);

int startRecordVideo(String videoSrcAddr, String filePath);

int stopRecordVideo(int index);

}

public int startRecordVideo(String videoSrcAddr, String filePath){

return CMux.INSTANCE.startRecordVideo(videoSrcAddr, filePath);

}

public int stopRecordVideo(int index){

return CMux.INSTANCE.stopRecordVideo(index);

}

public static void main(String[] args) {

TestSo ts = new TestSo();

String videoSrcAddr = "rtsp://172.28.175.86:8554/channel=0";

String filePath = "/root/yangxm/work/rec/0.mp4";

int c = ts.startRecordVideo(videoSrcAddr, filePath);

try

{

Thread.sleep(10000);

}

catch(Exception e)

{

;

}

ts.stopRecordVideo(c); // stop recording so the MP4 trailer gets written

System.out.println(videoSrcAddr + "," + filePath);

}

}

2.9.1 Compile

javac TestSo.java

2.9.2 Run

java TestSo
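
If loading fails at this point, the usual culprits are paths rather than code: the JNA jar has to be on the classpath for both javac and java, and libmux.so plus the FFmpeg shared libraries (installed to /usr/local/lib by make install in section 1.2) have to be visible to the loader, for example by adding their directories to LD_LIBRARY_PATH or by passing -Djna.library.path=<directory containing libmux.so> to java.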
