live555 + MediaCodec
demo: https://github.com/wangzuxing/MyLive555Mp4
JNI端用live555接收视频数据(改写testRTSPClient.cpp客户端程序,实现RTSP视频数据的接收和处理,处理后的数据可写入.264文件作测试验证),再通过JNI回调(GetMethodID/CallVoidMethod)调用Java端的pushBuf函数,把接收处理后的NALU推入Java端的缓存列表中,然后由Java端调用MediaCodec进行解码、显示
JAVA端:
MainActivity0:
// Load the native libraries at class-load time:
//  - liblive555.so: prebuilt live555 streaming library
//  - librtspclient.so: JNI glue built from testRTSPClient0.cpp
static {
...
System.loadLibrary("live555"); // liblive555.so (prebuilt)
System.loadLibrary("rtspclient"); // JNI wrapper around the live555 RTSP client
}
// Start the native RTSP client. Blocks inside the native event loop until the
// stream ends, so it must be called from a worker thread.
// program: path of the file the received H.264 data is written to
// sdp:     RTSP URL of the video server
public native void RtspClient(String program, String sdp);
// Tear down the native RTSP session started by RtspClient().
public native void RtspEnd();
// H.264 ("video/avc") decoder rendering into the display surface.
private MediaCodec mCodec;
// Create and start a "video/avc" (H.264) MediaCodec decoder bound to `surface`.
// NOTE(review): 352x288 is only an initial hint; the codec reconfigures itself
// from the SPS/PPS fed in onFrame() — confirm on target devices.
// NOTE(review): createDecoderByType can throw IOException; the excerpt assumes
// the enclosing method declares or handles it.
public void initDecoder() {
mCodec = MediaCodec.createDecoderByType(MIME_TYPE); // MIME_TYPE = "video/avc"
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, 352, 288);
mCodec.configure(mediaFormat, surface, null, 0); // render to surface, no crypto, decoder mode
mCodec.start();
}
// Synchronous MediaCodec decode of one NALU (Annex-B, 4-byte start code included).
// Queues `length` bytes of `buf` as a single input buffer with a synthetic pts,
// then drains any ready output buffers and renders them to the surface.
// Returns false when no input buffer became available, so the caller can retry
// the same NALU later.
public boolean onFrame(byte[] buf, int length) {
// Get input buffer index
ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
int inputBufferIndex = mCodec.dequeueInputBuffer(5); // 5 us timeout — effectively non-blocking
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(buf, 0, length);
mCodec.queueInputBuffer(inputBufferIndex, 0, length, mCount*TIME_INTERNAL, 0); // synthetic pts so frames display at a steady rate; TIME_INTERNAL corresponds to 25 fps — TODO confirm microsecond units
mCount++;
} else {
return false;
}
// Get output buffer index
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100);
while (outputBufferIndex >= 0) {
mCodec.releaseOutputBuffer(outputBufferIndex, true); // true = render this frame to the bound surface
outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
}
// NOTE(review): negative INFO_* results (format/buffers changed) are ignored
// here — tolerated for surface-rendered decode, but worth confirming.
return true;
}
startBtn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v){
// Start streaming: reset state, init the decoder, (re)create the dump
// file, then launch the native RTSP client and the decode/display thread.
// RTSP URL of the stream to play:
final String sdppath = "rtsp://218.204.223.237:554/live/1/67A7572844E51A64/f68g2mj7wjua3la7.sdp";
//"rtsp://218.204.223.237:554/live/1/67A7572844E51A64/f68g2mj7wjua3la7.sdp";
if(!istarted)
{
received_count = 0;
received_Flag = true;
mList.clear();
close();
initDecoder(); // set up the video/avc decoder
istarted = true;
MainActivity0.istarted0 = false;
// (Re)create the file the received H.264 stream is optionally dumped to.
final File f = new File(Environment.getExternalStorageDirectory(), "rtsp0.h264");
try {
if(!f.exists()){
f.createNewFile();
}else{
if(f.delete()){
f.createNewFile();
}
}
} catch (IOException e) {
e.printStackTrace();
}
// RtspClient() blocks in the native event loop, so run it on its own thread.
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
RtspClient(f.getPath(), sdppath); // pass the dump-file path and the RTSP URL
}
}).start();
// Start the decode/display consumer thread (readFile Runnable).
readFileThread = new Thread(readFile);
readFileThread.start();
}
}
});
stopBtn = (Button) findViewById(R.id.conver_btn0);
stopBtn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v){
// Stop streaming: tear down the native RTSP session, interrupt the
// decode/display thread and release local resources.
if(istarted){
istarted = false;
RtspEnd();
readFileThread.interrupt();
close();
}
}
});
// Buffer list of NALUs received by the native live555 client. Filled by
// pushBuf() — invoked from the native RTSP thread via JNI CallVoidMethod —
// and drained by the readFile decode thread, so access must be synchronized
// on the list itself.
public static ArrayList<ByteBuffer> mList = new ArrayList<ByteBuffer>();
private int received_count;

// Copy one NALU (len bytes of buf, start code included) into the shared list.
// Called from native code on the RTSP worker thread; the synchronized block
// guards the cross-thread ArrayList mutation (the original add was unguarded).
public void pushBuf(byte[] buf, int len)
{
    ByteBuffer buffer = ByteBuffer.allocate(len);
    buffer.put(buf, 0, len);
    synchronized (mList) {
        mList.add(buffer);
    }
}
// Decode/display consumer: drains NALUs from mList and feeds them to the
// decoder via onFrame().
// Startup protocol: the native side pushes SPS then PPS first; this thread
// keeps retrying them until the decoder accepts both (readFlag0 phase), then
// treats every further entry as a regular NALU.
// NOTE(review): mList is an unsynchronized ArrayList also written from the
// native thread via pushBuf() — confirm this is safe or synchronize both sides.
// NOTE(review): while readFlag0 is true, a head entry that is neither SPS
// (0x67) nor PPS (0x68) is never removed — the loop relies on SPS/PPS
// genuinely arriving first.
Runnable readFile = new Runnable()
{
@Override
public void run() {
boolean readFlag = true;
boolean readFlag0 = true; // true until SPS+PPS have been fed to the decoder
boolean flag = false;
byte[] buffer;
int video_count = 0;
while (!Thread.interrupted() && readFlag) {
while(mList.size() > 0){
ByteBuffer sendBuf = mList.get(0);
buffer = sendBuf.array();
if(readFlag0){
if(buffer.length>5 && buffer[4]==0x67){ // SPS (NALU type 7), byte after the 4-byte start code
flag = onFrame(buffer,buffer.length); // feed decoder configuration
if(flag) {
mList.remove(0);
video_count++;
}
}
if(buffer.length>5 && buffer[4]==0x68){ // PPS (NALU type 8)
flag = onFrame(buffer, buffer.length); // feed decoder configuration
if(flag) {
mList.remove(0);
video_count++;
}
readFlag0 = false; // config done — switch to normal decoding
}
}else{
if(buffer.length>5 ){ // plausible NALU (start code + header)
flag = onFrame(buffer, buffer.length); // decode and render to the surface
mList.remove(0); // removed even if the decoder was busy (flag == false): frame dropped
}
}
try {
Thread.sleep(TIME_INTERNAL); // pace feeding at the nominal frame interval
} catch (InterruptedException e) {
readFlag = false;
}
}
try {
Thread.sleep(TIME_INTERNAL); // idle wait for more NALUs to arrive
} catch (InterruptedException e) {
readFlag = false;
}
}
}
};
JNI端:
testRTSPClient0.cpp
// Sink callback: invoked by live555 once a complete frame (one NALU, WITHOUT a
// start code) has been written into fReceiveBuffer.
//
// Startup phase (runningFlag == 1): wait for the first IDR NALU (0x65). When
// it arrives, resolve MainActivity0.pushBuf once via JNI, upload the cached
// SPS/PPS NALUs, then switch to streaming mode (runningFlag = 0).
// Streaming phase: prepend the 00 00 00 01 Annex-B start code and hand the
// NALU to the Java-side buffer list through pushBuf(byte[], int).
//
// Fix vs. original: every jbyteArray is released with DeleteLocalRef. The
// live555 event loop never returns to Java, so un-released local references
// would accumulate until the JNI local-reference table overflows.
void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                                  struct timeval presentationTime, unsigned durationInMicroseconds) {
    int size = 0;
    unsigned char *buffer = NULL;
    if (runningFlag == 1) {
        if (fReceiveBuffer[0] == 0x65) { // first IDR: decoding can start
            LOGI(" sps pps start");
            runningFlag = 0;
            if (methodid == 0) {
                // Resolve the Java callback once. FindClass takes the fully
                // qualified, slash-separated class name.
                clazz = _env->FindClass("com/example/mylive555mp4/MainActivity0");
                if (clazz == 0) {
                    LOGI("can't find clazz");
                } else {
                    LOGI("find clazz");
                }
                _obj = _env->AllocObject(clazz);
                // Non-static method id, looked up by name and JNI signature.
                methodid = _env->GetMethodID(clazz, "pushBuf", "([BI)V");
                if (methodid == 0) {
                    LOGI("can't find methodid");
                } else {
                    send_up = 100;
                    LOGI(" find methodid start");
                    // Upload the cached SPS (12 bytes) and PPS (8 bytes) first so
                    // the Java decoder can be configured before any frame data.
                    jbyte *jy = (jbyte*)sps;
                    jbyteArray jbarray = _env->NewByteArray(12);
                    _env->SetByteArrayRegion(jbarray, 0, 12, jy);
                    _env->CallVoidMethod(_obj, methodid, jbarray, 12);
                    _env->DeleteLocalRef(jbarray); // don't leak the local ref
                    jbyte *jy0 = (jbyte*)pps;
                    jbyteArray jbarray0 = _env->NewByteArray(8);
                    _env->SetByteArrayRegion(jbarray0, 0, 8, jy0);
                    _env->CallVoidMethod(_obj, methodid, jbarray0, 8);
                    _env->DeleteLocalRef(jbarray0);
                    LOGI(" find methodid end");
                }
            }
            // Prepend the Annex-B start code so the NALU is playable/decodable.
            size = frameSize + 4;
            buffer = (unsigned char *)malloc(size);
            buffer[0] = 0x0;
            buffer[1] = 0x0;
            buffer[2] = 0x0;
            buffer[3] = 0x1;
            memcpy(buffer + 4, fReceiveBuffer, frameSize);
            /*
            Nalu_type (first byte after the start code):
            0x67 (0 11 00111) SPS, essential,  type = 7
            0x68 (0 11 01000) PPS, essential,  type = 8
            0x65 (0 11 00101) IDR key frame,   type = 5
            0x61 (0 10 00001) I frame,         type = 1
            0x41 (0 10 00001) P frame,         type = 1
            0x01 (0 00 00001) B frame,         type = 1
            0x06 (0 00 00110) SEI,             type = 6
            */
            LOGI(" IDR = %d", frameSize);
            // Optional dump to the .h264 file (players need the start code):
            //int ret = fwrite(buffer, 1, size, fp);
        }
    } else {
        // Streaming phase: wrap every received NALU with a start code.
        size = frameSize + 4;
        buffer = (unsigned char *)malloc(size);
        buffer[0] = 0x0;
        buffer[1] = 0x0;
        buffer[2] = 0x0;
        buffer[3] = 0x1;
        memcpy(buffer + 4, fReceiveBuffer, frameSize);
        // Optional dump to the .h264 file:
        //int ret = fwrite(buffer, 1, size, fp);
    }
    if (runningFlag == 0 && methodid != 0 && buffer != NULL) {
        // Push the NALU up to the Java buffer list.
        jbyteArray jbarray = _env->NewByteArray(size);
        _env->SetByteArrayRegion(jbarray, 0, size, (jbyte*)buffer);
        _env->CallVoidMethod(_obj, methodid, jbarray, size);
        _env->DeleteLocalRef(jbarray); // one array per frame — must be released
    }
    //usleep(30000); // 30ms
    free(buffer); // free(NULL) is a no-op when no NALU was forwarded
    continuePlaying();
}
// Ask the input source for the next frame; afterGettingFrame() is invoked
// once it has been delivered into fReceiveBuffer.
Boolean DummySink::continuePlaying() {
    // Sanity check: without a source there is nothing to read from.
    if (fSource == NULL) {
        return False;
    }
    fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE,
                          afterGettingFrame, this,
                          onSourceClosure, this);
    return True;
}
// JNI entry point: start the RTSP client.
//   program: path of the file the received H.264 data is written to
//   sdp:     RTSP URL of the video server
// Blocks inside doRtspClient()'s event loop until the stream is shut down, so
// the Java side must invoke it on a worker thread.
// doRtspClient() drives the usual live555 flow:
//   openURL() -> sendDescribeCommand -> continueAfterDESCRIBE (MediaSession)
//   -> sendSetupCommand -> continueAfterSETUP (DummySink::createNew(),
//      startPlaying(), RTCP bye handler) -> sendPlayCommand.
JNIEXPORT void JNICALL Java_com_example_mylive555mp4_MainActivity0_RtspClient
  (JNIEnv *env, jclass clz, jstring program, jstring sdp)
{
    const char* sdp_title = env->GetStringUTFChars(sdp, NULL);
    const char* program_title = env->GetStringUTFChars(program, NULL);
    // Cache this worker thread's JNIEnv for the pushBuf upcalls made from the
    // live555 event loop, which runs on this same thread.
    _env = env;
    runningFlag = 1; // wait for the first IDR before forwarding NALUs
    methodid = 0;    // force the pushBuf method lookup on the first IDR
    fp = fopen(program_title, "wb");
    if (!fp) {
        LOGI(" h264 fopen error ");
        // Release the UTF strings on the error path too (they must not leak).
        env->ReleaseStringUTFChars(sdp, sdp_title);
        env->ReleaseStringUTFChars(program, program_title);
        return;
    }
    doRtspClient(program_title, sdp_title); // returns when the event loop ends
    env->ReleaseStringUTFChars(sdp, sdp_title);
    env->ReleaseStringUTFChars(program, program_title);
}
// JNI entry point: called from Java when recording/playback should stop.
// Tears down the RTSP session (TEARDOWN) and ends doRtspClient()'s event loop.
// NOTE(review): invoked from the Java UI thread while the event loop runs on
// the worker thread — confirm shutdownStream() is safe to call cross-thread.
JNIEXPORT void Java_com_example_mylive555mp4_MainActivity0_RtspEnd
(JNIEnv *env, jclass clz)
{
shutdownStream(rtspClient);
}
Android.mk:
# --- Prebuilt libffmpeg.so ---
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
# --- Prebuilt liblive555.so (the streaming library loaded by the Java side) ---
include $(CLEAR_VARS)
LOCAL_MODULE := live555
LOCAL_SRC_FILES := liblive555.so
include $(PREBUILT_SHARED_LIBRARY)
# --- librtspclient.so: JNI module built from testRTSPClient0.cpp ---
include $(CLEAR_VARS)
# live555 header directories (BasicUsageEnvironment, liveMedia, groupsock, ...)
LOCAL_C_INCLUDES += \
...
$(LOCAL_PATH)/live555 \
$(LOCAL_PATH)/live555/BasicUsageEnvironment/include \
$(LOCAL_PATH)/live555/liveMedia/include \
$(LOCAL_PATH)/live555/groupsock/include \
$(LOCAL_PATH)/live555/UsageEnvironment/include
# Link against the prebuilts above plus the project's other codec libraries.
LOCAL_SHARED_LIBRARIES := ffmpeg mp3lame faac rtmp x264 live555
LOCAL_MODULE := rtspclient
LOCAL_SRC_FILES := testRTSPClient0.cpp
# Android log and zlib system libraries.
LOCAL_LDLIBS += -llog -lz
include $(BUILD_SHARED_LIBRARY)