live555 server mediacodec应用

live555 server + mediacodec

demo: https://github.com/wangzuxing/MyLive555ServerMediaCodec

java端创建缓存文件, 开启camera preview并调用mediacodec编码摄像头预览数据,把视频编码数据写入缓存文件,调用jni端live555的H264VideoFileServerMediaSubsession建立H.264视频文件流服务子会话(简单验证实时的视频流服务功能,文件写入、读出方式实现视频流服务的方式,效率比较低)

Java端:
StreamerActivity2:

 // Load the native libraries when the class is first initialized.
 // NOTE(review): "live555" is loaded before "streamer" — presumably because
 // libstreamer.so links against live555 symbols; confirm the link order.
 static {
         ...
         System.loadLibrary("live555");   // live555 RTSP server core
       System.loadLibrary("streamer");    // JNI glue (streamer.cpp below)
    }

    ...
    /** Stops the JNI-side RTSP event loop (sets the native quit flag). */
    public static native void end();
    /** 1. Creates/truncates the temp .264 cache file that encoded frames are written into. */
    public static native void RtspTempFile(String filename);
    /** Writes one encoded NALU into the cache file from the JNI side (test helper). */
    public static native void WriteRtspFrame(byte[] data, int size);
    /** 2. Starts the live555 RTSP server with an H264VideoFileServerMediaSubsession serving the .264 file; returns 0 on clean shutdown, -1 on failure. */
    public static native int  RtspServer(String filename);

    //yv12 转 yuv420p  yvu -> yuv  
    // YV12 (Y + V + U) -> I420/YUV420p (Y + U + V): the luma plane is copied
    // straight through and the two quarter-size chroma planes swap places.
    private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)
    {
        final int lumaSize = width * height;
        final int chromaSize = lumaSize / 4;
        // Y plane is identical in both layouts.
        System.arraycopy(yv12bytes, 0, i420bytes, 0, lumaSize);
        // YV12 stores V first, then U; I420 expects U first, then V.
        System.arraycopy(yv12bytes, lumaSize + chromaSize, i420bytes, lumaSize, chromaSize);   // U
        System.arraycopy(yv12bytes, lumaSize, i420bytes, lumaSize + chromaSize, chromaSize);   // V
    }

    // Synchronous MediaCodec ("video/avc") encode of one camera preview frame:
    // convert YV12 -> I420, submit it to the encoder, then drain every ready
    // output NALU and append it to the .264 cache file served by live555.
    // NOTE(review): uses the deprecated getInputBuffers()/getOutputBuffers()
    // API and does not handle INFO_OUTPUT_BUFFERS_CHANGED or
    // INFO_OUTPUT_FORMAT_CHANGED results from dequeueOutputBuffer.
    public void onFrame(byte[] buf, int length) {
            // buf is the YV12 preview frame; h264 is a reusable I420 staging
            // buffer (field) — assumed to be at least `length` bytes; confirm.
            swapYV12toI420(buf, h264, width, height);

            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            // -1 blocks until an input buffer is free.
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(h264, 0, length);
                // NOTE(review): presentationTimeUs is always 0 — acceptable for
                // this file-dump demo, wrong for real timestamped streaming.
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];

              byte[] outData = new byte[bufferInfo.size];
              outputBuffer.get(outData);
                    if(runOneFlag){
                         if(outData.length == 21){
                              // Stash the SPS+PPS config packet the encoder emits
                              // once up front (emitted NALUs already carry start codes).
                              // NOTE(review): matching on length == 21 is fragile —
                              // SPS/PPS size varies with resolution/profile/level;
                              // checking BUFFER_FLAG_CODEC_CONFIG would be robust.
                              System.arraycopy(outData, 0,  outData0, 0, 21);
                              runOneFlag = false;
                         }
                    }
                    // NOTE(review): runPushFlag is set unconditionally right before
                    // being tested, so the branch below always executes.
                    runPushFlag = true;
                    if(runPushFlag)
                    {
                         if(runTestFlag)
                         {
                             frame_count2++;
                             if(frame_count2 >= 50){ // buffer 50 frames, then start serving
                                  runTestFlag = false;
                                  new Thread() {
                                       @Override
                                       public void run() {
                                            // Start the RTSPServer's
                                            // H264VideoFileServerMediaSubsession on the cache file.
                                            RtspServer(h264Path);
                                       }
                                 }.start();
                             }
                         }
                       try {
                             // Append the NALU to the .264 file on the Java side;
                             // outputStream = new BufferedOutputStream(new FileOutputStream(...)).
                             outputStream.write(outData, 0, outData.length);
                         } catch (IOException e) {
                             e.printStackTrace();
                         }
                    }
                    //WriteRtspFrame(outData, outData.length);  // alternative: append the NALU from the JNI side
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
      }

JNI端:
streamer.cpp:

/**
 * JNI entry: stands up a live555 RTSP server on port 8554 serving the given
 * H.264 elementary-stream file as "rtsp://<ip>:8554/mstreamer" via
 * H264VideoFileServerMediaSubsession, then blocks in the event loop until
 * the global `quit` watch variable is set by ..._end().
 *
 * @param filename absolute path of the .264 cache file to stream
 * @return 0 on normal shutdown, -1 if the RTSP server could not be created
 */
JNIEXPORT jint JNICALL Java_com_example_mylive55servermediacodec_StreamerActivity2_RtspServer
(JNIEnv *env1, jobject obj, jstring filename)
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    UserAuthenticationDatabase* authDB = NULL; // no client authentication

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL) {
        LOGI("Failed to create RTSP server:%s", env->getResultMsg());
        // Fix: the original leaked the environment and scheduler on this path.
        env->reclaim();
        delete scheduler;
        return -1;
    }
    char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";

    quit = 0; // cleared so doEventLoop() below runs until end() sets it
    ServerMediaSession* sms = NULL;
    // A H.264 video elementary stream:
    {
        char const* streamName = "mstreamer";
        char const* inputFileName = env1->GetStringUTFChars(filename, NULL);

        LOGI("   inputFileName = %s", inputFileName);
        sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
        sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, inputFileName, reuseFirstSource0));
        rtspServer->addServerMediaSession(sms);

        announceStream0(rtspServer, sms, streamName, inputFileName);

        // Fix: the UTF chars were never released (per-call native leak).
        // live555 copies the file name internally, so releasing here is safe.
        env1->ReleaseStringUTFChars(filename, inputFileName);
    }
  /*
    {
        char const* streamName = "ts";
        char const* inputFileName = "/sdcard/H264_720p.ts";
        char const* indexFileName = "test.tsx";
        ServerMediaSession* sms = ServerMediaSession::createNew(*env,
                streamName, streamName, descriptionString);
        sms->addSubsession(
                MPEG2TransportFileServerMediaSubsession::createNew(*env,
                        inputFileName, indexFileName, reuseFirstSource0));
        rtspServer->addServerMediaSession(sms);

        announceStream0(rtspServer, sms, streamName, inputFileName);
    }
  */
    // Also offer RTSP-over-HTTP tunneling on the first port we can bind.
    if (rtspServer->setUpTunnelingOverHTTP(80)
            || rtspServer->setUpTunnelingOverHTTP(8000)
            || rtspServer->setUpTunnelingOverHTTP(8080)) {
        LOGI("we use port-->%d", rtspServer->httpServerPortNum());
    } else {
        LOGI("RTSP-over-HTTP tunneling is not available.");
    }

    // Blocks here until end() sets `quit` non-zero.
    env->taskScheduler().doEventLoop(&quit);

    // Tear down in reverse order of construction.
    rtspServer->removeServerMediaSession(sms);
    Medium::close(rtspServer);

    env->reclaim();
    env = NULL;
    delete scheduler;
    scheduler = NULL;

    LOGI("......End......\n");

    return 0;
}

// JNI entry: requests shutdown of the running RTSP server by flipping the
// global `quit` watch variable that doEventLoop(&quit) polls in RtspServer().
JNIEXPORT void JNICALL Java_com_example_mylive55servermediacodec_StreamerActivity2_end(JNIEnv *env, jobject obj) {
     quit = 1; // RtspServer() performs the actual teardown after its loop exits
     LOGI("Ending streaming...\n");
}

/**
 * JNI entry: creates (or truncates) the .264 cache file that encoded frames
 * will be appended to, and records its path in the global `inputFilename`.
 *
 * @param filename absolute path of the cache file to create
 */
JNIEXPORT void JNICALL Java_com_example_mylive55servermediacodec_StreamerActivity2_RtspTempFile
(JNIEnv *env, jobject obj, jstring filename)
{
    // Private, NUL-terminated copy of the path so the JNI-owned chars can be
    // released below. Fix: the original stored the GetStringUTFChars pointer
    // in the global and never released it, leaking the string on every call.
    static char pathCopy[512];

    char const* inputFilename0 = env->GetStringUTFChars(filename, NULL);

    snprintf(pathCopy, sizeof(pathCopy), "%s", inputFilename0);
    inputFilename = pathCopy;

    // "wb+": create/truncate, opened for both writing and reading.
    file = fopen(inputFilename0, "wb+");
    if (!file) {
        LOGE("couldn't open %s", inputFilename0);
        env->ReleaseStringUTFChars(filename, inputFilename0);
        // NOTE(review): exit() kills the whole Android app process; throwing a
        // Java exception or returning an error would be friendlier. Kept for
        // behavioral parity with the original.
        exit(1);
    }

    LOGE("  RtspTempFile open %s  ", inputFilename0);
    env->ReleaseStringUTFChars(filename, inputFilename0);
}

//添加视频帧的方法
/**
 * JNI entry: appends one encoded H.264 NALU (already start-code prefixed)
 * to the cache file opened by RtspTempFile(), serialized by the global
 * `mutex` so concurrent writers cannot interleave.
 *
 * @param data encoded frame bytes
 * @param size number of valid bytes in `data`
 */
JNIEXPORT void JNICALL Java_com_example_mylive55servermediacodec_StreamerActivity2_WriteRtspFrame(
        JNIEnv *env, jclass clz, jbyteArray data, jint size)
{
    // Fix: the second argument is a `jboolean*` out-parameter (isCopy), not a
    // flag — the original passed JNI_FALSE, which only worked because it is 0.
    jbyte *buf = env->GetByteArrayElements(data, NULL);
    if (buf == NULL) {
        return; // OutOfMemoryError is already pending in the JVM
    }

    pthread_mutex_lock(&mutex);
    // Fix: guard against calls arriving before RtspTempFile() opened the file.
    if (file != NULL) {
        size_t written = fwrite(buf, 1, (size_t)size, file);
        // Fix: the original logged whenever fwrite returned non-zero, i.e. on
        // nearly every successful write; only a short write is an error.
        if (written != (size_t)size) {
            LOGE("short write: %d of %d bytes", (int)written, (int)size);
        }
        // Flush so the bytes are visible promptly to the live555 file reader.
        fflush(file);
    }
    pthread_mutex_unlock(&mutex);

    // JNI_ABORT: the buffer was only read, so nothing needs copying back.
    env->ReleaseByteArrayElements(data, buf, JNI_ABORT);
}

(原文此处为图片)

(原文此处为图片)

  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值