Android Audio and Video Development, Illustrated

A learning path for Android audio/video development: the knowledge you need to cover.

Camera configuration parameters
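The exact values depend on the device, but a minimal sketch of configuring the (pre-Camera2) android.hardware.Camera for this pipeline might look like the following. The 1280x720 size and 30 fps range are illustrative assumptions; real code must pick values from what the device reports as supported.

import android.graphics.ImageFormat;
import android.hardware.Camera;

public class CameraConfigHelper {
    /**
     * Sketch: request NV21 preview frames at 1280x720 / 30 fps.
     * The concrete size and fps range must come from getSupportedPreviewSizes()
     * and getSupportedPreviewFpsRange(); they are hard-coded here for brevity.
     */
    public static void configure(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        // NV21 is the default preview format and is what the encoder thread expects
        params.setPreviewFormat(ImageFormat.NV21);
        params.setPreviewSize(1280, 720);
        // Fps range values are in units of 1/1000 fps
        params.setPreviewFpsRange(30000, 30000);
        // Continuous autofocus for video, if the device supports it
        if (params.getSupportedFocusModes()
                  .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        camera.setParameters(params);
    }
}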

Capturing raw camera data in real time
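The encoder thread below polls frames from CameraSurfaceView.YUVQueue, so the capture side only needs to push each NV21 preview frame into that queue. A rough sketch of such a preview callback follows; the queue capacity of 10 and the drop-oldest policy are assumptions.

import java.util.concurrent.ArrayBlockingQueue;

import android.hardware.Camera;

public class CameraSurfaceView /* extends SurfaceView ... */ implements Camera.PreviewCallback {

    // Queue of raw NV21 frames consumed by the H.264 encoder thread
    public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(10);

    // Register with camera.setPreviewCallback(this) after starting the preview.
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Drop the oldest frame if the encoder falls behind, so the stream stays real time
        if (YUVQueue.size() >= 10) {
            YUVQueue.poll();
        }
        YUVQueue.add(data);
    }
}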

The thread that encodes the raw NV21 data into H.264

public void startEncoderThread(){
        Thread EncoderThread = new Thread(new Runnable() {

            @SuppressLint("NewApi")
            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts =  0;
                long generateIndex = 0;

                while (isRuning) {
                    if (CameraSurfaceView.YUVQueue.size() >0){
                        input = CameraSurfaceView.YUVQueue.poll();
                        byte[] yuv420sp = new byte[m_width*m_height*3/2];
                        byte[] yuv420 = new byte[m_width*m_height*3/2];
                        VideoYUVUtil.NV21ToNV12(input,yuv420sp,m_width,m_height);
//                        VideoYUVUtil.YUV420spRotate90Clockwise(yuv420sp,yuv420,m_width,m_height);
                        input = yuv420sp;
                    }
                    if (input != null) {
                        try {
                            long startMs = System.currentTimeMillis();
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }

                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                //Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                if((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0){
                                    // Codec config data (SPS/PPS): cache it so it can be prepended to every keyframe
                                    configbyte = outData;
                                }else if((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0){
                                    byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);

                                    outputStream.write(keyframe, 0, keyframe.length);
                                    if(onEncoderVideoListener!=null){
                                        onEncoderVideoListener.encoderVideoData(keyframe,keyframe.length);
                                    }
                                }else{
                                    outputStream.write(outData, 0, outData.length);
                                    if(onEncoderVideoListener!=null){
                                        onEncoderVideoListener.encoderVideoData(outData,outData.length);
                                    }
                                }

                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }

                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        });
        EncoderThread.start();

    }
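The thread above assumes that mediaCodec has already been created and started as an H.264 ("video/avc") encoder whose input color format matches the NV12 data produced by VideoYUVUtil.NV21ToNV12(). A minimal setup sketch follows; the bitrate and keyframe interval are illustrative assumptions, and the body of computePresentationTime() shown here is only a presumed implementation (frame index scaled to microseconds).

    // Uses android.media.MediaCodec, MediaCodecInfo, MediaFormat and java.io.IOException.
    private void initVideoEncoder(int width, int height, int frameRate) throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        // Semi-planar YUV420 (NV12) matches the output of VideoYUVUtil.NV21ToNV12()
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        format.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);   // assumed bitrate
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);            // one keyframe per second

        mediaCodec = MediaCodec.createEncoderByType("video/avc");
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    // Presumed helper: derive a microsecond timestamp from the frame index.
    private long computePresentationTime(long frameIndex) {
        return frameIndex * 1000000 / 30;  // assuming 30 fps
    }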

The thread that captures raw PCM audio data in real time
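The capture code itself is not shown, but the AAC encoder below takes AudioData items from audioQueue, so the capture thread only needs to read from AudioRecord and enqueue each buffer. A rough sketch follows; the AudioData wrapper (no-arg constructor, public audioData field), the isRecording flag, and the 44.1 kHz stereo 16-bit settings are assumptions chosen to be consistent with the encoder code.

    // Uses android.media.AudioRecord/AudioFormat/MediaRecorder and java.util.Arrays.
    // Requires the RECORD_AUDIO permission.
    private void startAudioRecordThread() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                int sampleRate = 44100;   // assumed; must match audioSamplerate used by the encoder
                int minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
                        AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
                AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                        sampleRate, AudioFormat.CHANNEL_IN_STEREO,
                        AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);
                audioRecord.startRecording();

                byte[] pcmBuffer = new byte[minBufferSize];
                while (isRecording) {   // assumed stop flag
                    int read = audioRecord.read(pcmBuffer, 0, pcmBuffer.length);
                    if (read > 0) {
                        // Wrap the PCM chunk and hand it to the AAC encoder thread
                        AudioData audioData = new AudioData();
                        audioData.audioData = Arrays.copyOf(pcmBuffer, read);
                        audioQueue.offer(audioData);
                    }
                }
                audioRecord.stop();
                audioRecord.release();
            }
        }).start();
    }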

Encoding the raw PCM data into AAC audio

    /**
     * AAC audio encoding thread
     */
    public void startAudioEncodeAAC() {
        audioEncoderThread = new Thread() {
            @Override
            public void run() {
                try {
                    while (!audioEncoderLoop && !Thread.interrupted()) {
                        AudioData audioData = audioQueue.take();
                        if (audioData != null && encoder != null) {
                            byte[] buffer = audioData.audioData;
                            int size = buffer.length;
                            // Recorded duration: size / (sample rate * channels * bits per sample / 8)
                            recordTime += size * 1.0 / (audioSamplerate * 2 * (16 / 8));
//                            LogUtils.d("recordTime = " + recordTime);

                            int inputBufferindex = encoder.dequeueInputBuffer(0);
                            if (inputBufferindex >= 0) {
                                ByteBuffer byteBuffer = encoder.getInputBuffers()[inputBufferindex];
                                byteBuffer.clear();
                                byteBuffer.put(buffer);
                                encoder.queueInputBuffer(inputBufferindex, 0, size, 0, 0);
                            }

                            int index = encoder.dequeueOutputBuffer(info, 0);
                            while (index >= 0) {
                                try {
                                    // Each encoded AAC frame gets a 7-byte ADTS header prepended
                                    perpcmsize = info.size + 7;
                                    outByteBuffer = new byte[perpcmsize];

                                    ByteBuffer byteBuffer = encoder.getOutputBuffers()[index];
                                    byteBuffer.position(info.offset);
                                    byteBuffer.limit(info.offset + info.size);

                                    addADtsHeader(outByteBuffer, perpcmsize, aacsamplerate);

                                    byteBuffer.get(outByteBuffer, 7, info.size);
                                    byteBuffer.position(info.offset);
                                    if (audioEncderListener != null) {
                                        audioEncderListener.encoderAudioData(outByteBuffer, perpcmsize);
                                    }
                                    encoder.releaseOutputBuffer(index, false);
                                    index = encoder.dequeueOutputBuffer(info, 0);
                                    outByteBuffer = null;
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        };
        audioEncoderThread.start();
    }
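The addADtsHeader() call above is not shown in the post; it writes the standard 7-byte ADTS header in front of each AAC frame so the raw stream can be played or pushed directly. A typical implementation looks roughly like this (hard-coded here for AAC-LC with 2 channels; the sampling-frequency index must match aacsamplerate):

    /**
     * Prepend a 7-byte ADTS header to packet (AAC-LC, 2 channels assumed).
     * packetLen is the total length including the 7 header bytes.
     */
    private void addADtsHeader(byte[] packet, int packetLen, int sampleRate) {
        int profile = 2;                               // AAC-LC
        int freqIdx = sampleRateToFreqIdx(sampleRate); // sampling-frequency index
        int chanCfg = 2;                               // channel configuration: stereo

        packet[0] = (byte) 0xFF;                       // syncword 0xFFF (part 1)
        packet[1] = (byte) 0xF9;                       // syncword (part 2) + MPEG-2, no CRC
        packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
        packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
        packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
        packet[6] = (byte) 0xFC;
    }

    /** Map a sample rate in Hz to the ADTS sampling-frequency index. */
    private int sampleRateToFreqIdx(int sampleRate) {
        switch (sampleRate) {
            case 96000: return 0;
            case 88200: return 1;
            case 64000: return 2;
            case 48000: return 3;
            case 44100: return 4;
            case 32000: return 5;
            case 24000: return 6;
            case 22050: return 7;
            case 16000: return 8;
            default:    return 4;  // fall back to 44.1 kHz
        }
    }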

The encoded H.264 video data and AAC audio data are packaged into packets and sent out in real time over a streaming protocol (RTMP).
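How the packets are actually pushed depends entirely on the RTMP library in use (typically librtmp through JNI), which this post does not show. The sketch below only illustrates the glue implied by the two encoder callbacks defined above (encoderVideoData / encoderAudioData): frames go into one queue and a sender thread drains it. The RtmpPusher class and the commented-out rtmpClient.send() call are hypothetical placeholders, not a real API.

import java.util.Arrays;
import java.util.concurrent.LinkedBlockingQueue;

// Hypothetical glue code: queue encoded frames and drain them on a sender thread.
public class RtmpPusher {

    private static class Packet {
        byte[] data;
        boolean isVideo;
    }

    private final LinkedBlockingQueue<Packet> sendQueue = new LinkedBlockingQueue<Packet>();
    private volatile boolean sending = true;

    // Called from the video encoder's onEncoderVideoListener.encoderVideoData()
    public void onVideoData(byte[] h264, int length) {
        enqueue(h264, length, true);
    }

    // Called from the audio encoder's audioEncderListener.encoderAudioData()
    public void onAudioData(byte[] aacWithAdts, int length) {
        enqueue(aacWithAdts, length, false);
    }

    private void enqueue(byte[] data, int length, boolean isVideo) {
        Packet p = new Packet();
        p.data = Arrays.copyOf(data, length);
        p.isVideo = isVideo;
        sendQueue.offer(p);
    }

    public void startSendThread() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (sending) {
                    try {
                        Packet p = sendQueue.take();
                        // Hand the frame to the RTMP implementation in use
                        // (e.g. librtmp over JNI). Placeholder call:
                        // rtmpClient.send(p.data, p.data.length, p.isVideo);
                    } catch (InterruptedException e) {
                        break;
                    }
                }
            }
        }).start();
    }
}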

On the receiving side, the H.264 video data is decoded back into raw frames and played.

The received data is fed into a decoding thread

The H.264 stream is decoded into raw frames in real time and rendered to the screen
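A minimal sketch of the video decoding side: a MediaCodec decoder configured with a Surface, so each decoded frame is rendered directly when its output buffer is released with render = true. The class name and the width/height values are assumptions; they must match the sender.

import java.io.IOException;
import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

public class H264DecoderSketch {
    private MediaCodec mVideoDecoder;

    public void init(Surface surface, int width, int height) throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        mVideoDecoder = MediaCodec.createDecoderByType("video/avc");
        // Decoding straight to a Surface avoids copying raw YUV frames around
        mVideoDecoder.configure(format, surface, null, 0);
        mVideoDecoder.start();
    }

    /** Feed one received H.264 unit (SPS/PPS or frame) into the decoder and render any output. */
    public void decode(byte[] data, int length) {
        int inIndex = mVideoDecoder.dequeueInputBuffer(10000);
        if (inIndex >= 0) {
            ByteBuffer inputBuffer = mVideoDecoder.getInputBuffers()[inIndex];
            inputBuffer.clear();
            inputBuffer.put(data, 0, length);
            mVideoDecoder.queueInputBuffer(inIndex, 0, length, 0, 0);
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = mVideoDecoder.dequeueOutputBuffer(info, 0);
        while (outIndex >= 0) {
            // render = true pushes the decoded frame to the Surface
            mVideoDecoder.releaseOutputBuffer(outIndex, true);
            outIndex = mVideoDecoder.dequeueOutputBuffer(info, 0);
        }
    }
}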

The received AAC audio data is fed into an audio decoding thread and decoded back into raw PCM audio data

/**
 * AAC decoding + playback
 */
public void decode(byte[] buf, int offset, int length) {
    // Input ByteBuffers
    ByteBuffer[] codecInputBuffers = mDecoder.getInputBuffers();
    // Output ByteBuffers
    ByteBuffer[] codecOutputBuffers = mDecoder.getOutputBuffers();
    // Timeout: 0 -> return immediately, -1 -> wait indefinitely
    long kTimeOutUs = 0;
    try {
        // Returns the index of an input buffer ready to be filled, or -1 if none is available
        int inputBufIndex = mDecoder.dequeueInputBuffer(kTimeOutUs);
        if (inputBufIndex >= 0) {
            // Get the corresponding ByteBuffer
            ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
            // Clear the ByteBuffer
            dstBuf.clear();
            // Fill it with the encoded data
            dstBuf.put(buf, offset, length);
            // Submit the input buffer at the given index to the decoder
            mDecoder.queueInputBuffer(inputBufIndex, 0, length, 0, 0);
        }
        // Codec buffer metadata
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // Returns the index of an output buffer, or -1 if none is available
        int outputBufferIndex = mDecoder.dequeueOutputBuffer(info, kTimeOutUs);
        if (outputBufferIndex < 0) {
            // Count the times no decoded output was available
            count++;
        }
        ByteBuffer outputBuffer;
        while (outputBufferIndex >= 0) {
            // Get the decoded ByteBuffer
            outputBuffer = codecOutputBuffers[outputBufferIndex];
            // Copy out the decoded PCM data
            byte[] outData = new byte[info.size];
            outputBuffer.get(outData);
            // Clear the buffer
            outputBuffer.clear();
            // Play the decoded data
            mPlayer.playAudioTrack(outData, 0, info.size);
            // Release the output buffer back to the decoder
            mDecoder.releaseOutputBuffer(outputBufferIndex, false);
            // Keep draining any remaining decoded output
            outputBufferIndex = mDecoder.dequeueOutputBuffer(info, kTimeOutUs);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
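The decode() method above assumes mDecoder was configured as an AAC decoder ("audio/mp4a-latm"). A minimal setup sketch follows; the 44.1 kHz / 2-channel values and the csd-0 bytes (AudioSpecificConfig for AAC-LC, 44.1 kHz, stereo) are assumptions that must match the encoder on the sending side.

    // Uses android.media.MediaCodec, MediaFormat and java.nio.ByteBuffer.
    private void initAudioDecoder() throws IOException {
        int sampleRate = 44100;    // assumed; must match the sender
        int channelCount = 2;

        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", sampleRate, channelCount);
        // AudioSpecificConfig: AAC-LC (2), 44.1 kHz (index 4), 2 channels -> 0x12 0x10
        byte[] audioSpecificConfig = {(byte) 0x12, (byte) 0x10};
        format.setByteBuffer("csd-0", ByteBuffer.wrap(audioSpecificConfig));
        // If the incoming frames still carry the 7-byte ADTS header added by the encoder,
        // either strip it before queueing or set MediaFormat.KEY_IS_ADTS to 1 here.

        mDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
        mDecoder.configure(format, null, null, 0);
        mDecoder.start();
    }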

The decoded PCM is fed into AudioTrack for playback, and at this point you can hear the audio.
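mPlayer.playAudioTrack() above presumably wraps an AudioTrack in streaming mode. A minimal sketch of such a wrapper follows; the class name and the 44.1 kHz stereo 16-bit parameters are assumptions and must match the decoded PCM.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class AudioTrackPlayer {
    private AudioTrack mAudioTrack;

    public void init() {
        int sampleRate = 44100;  // assumed; must match the decoded PCM
        int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
        mAudioTrack.play();
    }

    /** Write one chunk of decoded PCM; in MODE_STREAM this blocks until the data is queued. */
    public void playAudioTrack(byte[] pcm, int offset, int length) {
        if (mAudioTrack != null && length > 0) {
            mAudioTrack.write(pcm, offset, length);
        }
    }

    public void release() {
        if (mAudioTrack != null) {
            mAudioTrack.stop();
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }
}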

 

Because audio and video are decoded separately, audio/video synchronization is needed after decoding, and there are several common sync strategies. For example, in FFmpeg-based players the audio is typically used as the reference clock: the video compares its current clock against the audio clock, and if the video is ahead, the delay is increased so the next frame is shown later; if it is behind, the delay is reduced so the next frame is shown sooner.
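As a concrete illustration of that idea, here is a simplified sketch of the per-frame delay adjustment with audio as the master clock; the threshold and clamping choices are arbitrary assumptions, not the exact FFmpeg/ffplay logic.

/**
 * Simplified A/V sync sketch: compute how long to wait before showing the next
 * video frame, using the audio clock as the master. All times are in seconds.
 */
double computeVideoDelay(double frameDuration, double videoClock, double audioClock) {
    double delay = frameDuration;          // nominal delay between two video frames
    double diff = videoClock - audioClock; // > 0: video is ahead, < 0: video is behind
    double syncThreshold = Math.max(0.01, frameDuration);

    if (Math.abs(diff) > syncThreshold) {
        if (diff > 0) {
            // Video is ahead of the audio clock: show the next frame later
            delay = delay + diff;
        } else {
            // Video is behind: shrink the delay (possibly to 0) to catch up
            delay = Math.max(0, delay + diff);
        }
    }
    return delay;  // the caller sleeps roughly this long before rendering the next frame
}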

Thanks for reading.

 
