王学岗 - Live Streaming Push (Software Encoding) 03: x264 Integration and Camera Streaming

Preface

1. There are only two common options for software encoding: x264, which only encodes video and is single-purpose, and FFmpeg, which is full-featured.
2. faac is used for audio encoding.
3. Software encoding does not go through MediaCodec.

Architecture Overview

1. The queue lives in the native layer. With hardware encoding the queue sat in the Java layer; that is the key difference.
2. The queue must hold compressed data only, never raw frames.
3. This article uses camera data as the video source.
4. CameraX does not require us to handle rotation ourselves; with Camera1 you have to write the rotation algorithm yourself.
5. x264 has to be compiled by yourself.
6. The biggest difference from MediaCodec (where data travels CPU → DSP → CPU) is that everything stays on the CPU. We therefore need a place to hold the frame data: x264_picture_t stores the raw frame (e.g. YUV).
7. How is the YUV buffer size calculated? For a 4:2:0 frame the Y plane is width × height bytes, and the U and V planes are each width × height / 4, so the whole frame is width × height × 3 / 2 bytes (see the sketch after this list).
8. Encoding happens in the native layer, so the queue can only live in the native layer as well.
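A minimal Java sketch of that size calculation (plain arithmetic, no project-specific APIs; the resolution values are just examples):

public class YuvSizeDemo {
    public static void main(String[] args) {
        int width = 480, height = 640;          // example resolution

        int ySize = width * height;             // one luma byte per pixel
        int uSize = width * height / 4;         // chroma is subsampled 2x2 in 4:2:0
        int vSize = width * height / 4;
        int frameSize = ySize + uSize + vSize;  // = width * height * 3 / 2

        System.out.println("Y=" + ySize + " U=" + uSize + " V=" + vSize + " total=" + frameSize);
    }
}

This is why the code below always allocates its NV21 buffers as width * height * 3 / 2.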

Code

Add the CameraX dependencies

 implementation "androidx.camera:camera-core:1.0.0-alpha05"
    implementation "androidx.camera:camera-camera2:1.0.0-alpha05"


package com.maniu.x264rtmpmaniu.camerax;

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.TextureView;
import android.view.ViewGroup;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageAnalysisConfig;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.lifecycle.LifecycleOwner;

import com.maniu.x264rtmpmaniu.LivePusher;
import com.maniu.x264rtmpmaniu.utils.ImageUtil;
import java.util.concurrent.locks.ReentrantLock;
//CameraX keeps things simple
public class VideoChanel implements Preview.OnPreviewOutputUpdateListener, ImageAnalysis.Analyzer {
    private CameraX.LensFacing currentFacing = CameraX.LensFacing.BACK;
    //with CameraX we do not have to adapt the width/height ourselves; CameraX adapts automatically
    int width = 480;
    int height = 640;
    private TextureView textureView;
    private HandlerThread handlerThread;
    LivePusher livePusher;
    private MediaCodec mediaCodec;
    private boolean isLiving;



    //lifecycleOwner is the activity; livePusher does two things: it initializes the native VideoChannel and it pushes the stream
    public VideoChanel(LifecycleOwner lifecycleOwner, TextureView textureView, LivePusher livePusher) {
        this.livePusher = livePusher;
        this.textureView = textureView;
        handlerThread = new HandlerThread("Analyze-thread");
        handlerThread.start();
        ImageAnalysisConfig imageAnalysisConfig = new ImageAnalysisConfig.Builder()
                //which thread the image-analysis callback runs on; it must never be the main thread
                .setCallbackHandler(new Handler(handlerThread.getLooper()))
                .setLensFacing(currentFacing)
                //reader mode: always hand us the latest image
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setTargetResolution(new Size(width, height))
                .build();
        ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig);
        imageAnalysis.setAnalyzer(this);
        //target width and height of the preview
        PreviewConfig previewConfig = new PreviewConfig.Builder()
                .setTargetResolution(new Size(width, height))
                .setLensFacing(currentFacing) //front or back camera
                .build();
        Preview preview = new Preview(previewConfig);
        //register a listener if we want the preview output
        preview.setOnPreviewOutputUpdateListener(this);
        //bind to the Activity; the use cases are a varargs array
        //CameraX wraps capture, preview (data goes from CPU to GPU for rendering) and image analysis
        //imageAnalysis: for live streaming, gives us the frames one by one
        //preview: the on-screen preview
        CameraX.bindToLifecycle(lifecycleOwner, preview, imageAnalysis);
    }
    private ReentrantLock lock = new ReentrantLock();
    private byte[] y;
    private byte[] u;
    private byte[] v;
    // frame buffers; kept as fields to avoid re-allocating every frame and to reduce GC pressure
    private byte[] nv21;
    byte[] nv21_rotated;
    byte[] nv12;
    //similar to camera1's previewCallback
    @Override
    public void analyze(ImageProxy image, int rotationDegrees) {
        //once the camera is open, analyze() is called back continuously; the frame data is inside image
        if (!isLiving) {
            return;
        }

        Log.i("david", "analyze: ");
        // only grab the I420 data once the live session has started and the server connection has succeeded
        //planes[0], planes[1] and planes[2] are the Y, U and V planes respectively
        ImageProxy.PlaneProxy[] planes = image.getPlanes();
        lock.lock();//called from a worker thread, so guard with the lock
        // reuse the same byte arrays to reduce GC pressure
        //camera1 hands you NV21 directly, which turns out to be thankless work; CameraX puts Y, U and V into separate ByteBuffers
        if (y == null) {
            //the Y size of each frame could be computed as width * height, but that is not recommended; use limit() instead
            //limit() gives the length of the whole buffer
            y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
            u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
            v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
            //initialize the native encoder; width and height are passed swapped (the frame will be rotated)
            this.livePusher.native_setVideoEncInfo(image.getHeight(),
                    image.getWidth(), 10, 640_000);
        }
        if (image.getPlanes()[0].getBuffer().remaining() == y.length) {
            planes[0].getBuffer().get(y);//copy into the y array
            planes[1].getBuffer().get(u);
            planes[2].getBuffer().get(v);
            int stride = planes[0].getRowStride();
            //this differs from the width/height we requested; it is the adapted size
            Size size = new Size(image.getWidth(), image.getHeight());
            int width = size.getHeight();
            int height = planes[0].getRowStride();
            if (nv21 == null) {
                nv21 = new byte[height * width * 3 / 2];
                nv21_rotated = new byte[height * width * 3 / 2];
            }
            ImageUtil.yuvToNv21(y, u, v, nv21, height, width);
            ImageUtil.nv21_rotate_to_90(nv21, nv21_rotated, height, width);
            //nv21_rotated is one rotated frame, the starting point of the pipeline; push it
            livePusher.native_pushVideo(nv21_rotated);
        }
        lock.unlock();

    }
    //used for the preview; needs the textureView
    @Override
    public void onUpdated(Preview.PreviewOutput output) {
        SurfaceTexture surfaceTexture = output.getSurfaceTexture();
        //check whether the SurfaceTexture coming from the camera is the one we saved; after rotating from portrait to landscape it is no longer the same one
        if (textureView.getSurfaceTexture() != surfaceTexture) {
            if (textureView.isAvailable()) {
                // when switching cameras this would throw an error if we did not handle it
                ViewGroup parent = (ViewGroup) textureView.getParent();
                parent.removeView(textureView);
                //after rotating from portrait to landscape the SurfaceTexture changes, so re-add the view
                parent.addView(textureView, 0);
                parent.requestLayout();
            }
            //bind the new SurfaceTexture
            textureView.setSurfaceTexture(surfaceTexture);
        }
    }
    public void startLive() {
        isLiving = true;
    }
}
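The VideoChanel above delegates the actual work to a LivePusher wrapper that is not listed in this post. As a minimal sketch, with the two native method signatures inferred from the calls in analyze() and everything else (library name, start method) marked as an assumption, it could look like this:

package com.maniu.x264rtmpmaniu;

// Sketch of the LivePusher wrapper referenced above, not the author's actual implementation.
public class LivePusher {

    static {
        System.loadLibrary("native-lib");   // assumed library name
    }

    // configure the native x264 encoder: width, height, fps, bitrate (signature inferred from analyze())
    public native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);

    // hand one rotated NV21 frame to the native layer for encoding and RTMP pushing (inferred from analyze())
    public native void native_pushVideo(byte[] data);

    // assumed: connect to the RTMP server on a worker thread before frames are pushed
    public native void native_start(String rtmpUrl);
}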

package com.maniu.x264rtmpmaniu;

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import com.maniu.x264rtmpmaniu.utils.ImageUtil;

import java.util.Iterator;
import java.util.List;

public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallback {
    private static final String TAG = "CameraHelper";
    private Activity mActivity;
    private int mHeight;
    private int mWidth;
    private int mCameraId;
    private Camera mCamera;
    private byte[] buffer;
    private byte[] yuv;
    private SurfaceHolder mSurfaceHolder;
    private Camera.PreviewCallback mPreviewCallback;
    private int mRotation;

    private OnChangedSizeListener mOnChangedSizeListener;
    public CameraHelper(Activity activity, int cameraId, int width, int height) {
        mActivity = activity;
        mCameraId = cameraId;
        mWidth = width;
        mHeight = height;
    }
    public void switchCamera() {
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        }
        stopPreview();
        startPreview();
    }

    private void stopPreview() {
        if (mCamera != null) {
            //clear the preview data callback
            mCamera.setPreviewCallback(null);
            //stop the preview
            mCamera.stopPreview();
            //release the camera
            mCamera.release();
            mCamera = null;
        }
    }

    private void startPreview() {
        try {
            //obtain the Camera object
            mCamera = Camera.open(mCameraId);
            //configure the camera's parameters
            Camera.Parameters parameters = mCamera.getParameters();
            //set the preview data format to NV21
            parameters.setPreviewFormat(ImageFormat.NV21);
            //set the camera preview width and height
            setPreviewSize(parameters);
            //set the orientation of the camera's image sensor
            setPreviewOrientation(parameters);
            mCamera.setParameters(parameters);
            buffer = new byte[mWidth * mHeight * 3 / 2];
            yuv = new byte[mWidth * mHeight * 3 / 2];
            //data buffer for preview callbacks
            mCamera.addCallbackBuffer(buffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            //set the preview surface
            mCamera.setPreviewDisplay(mSurfaceHolder);
            mOnChangedSizeListener.onChanged(mWidth, mHeight);
            mCamera.startPreview();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    private void setPreviewOrientation(Camera.Parameters parameters) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(mCameraId, info);
        mRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (mRotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90: // landscape, top of the device on the left (home key on the right)
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:// landscape, top of the device on the right
                degrees = 270;
                break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else { // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        //set the display orientation
        mCamera.setDisplayOrientation(result);
    }

    private void setPreviewSize(Camera.Parameters parameters) {
        //get the preview sizes the camera supports
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size size = supportedPreviewSizes.get(0);
        Log.d(TAG, "supported " + size.width + "x" + size.height);
        //pick the supported resolution closest to the one we requested
        //e.g. supported: 10x10 20x20 30x30
        //requested 12x12 -> choose 10x10
        int m = Math.abs(size.height * size.width - mWidth * mHeight);
        supportedPreviewSizes.remove(0);
        Iterator<Camera.Size> iterator = supportedPreviewSizes.iterator();
        //iterate over the remaining sizes
        while (iterator.hasNext()) {
            Camera.Size next = iterator.next();
            Log.d(TAG, "supported " + next.width + "x" + next.height);
            int n = Math.abs(next.height * next.width - mWidth * mHeight);
            if (n < m) {
                m = n;
                size = next;
            }
        }
        mWidth = size.width;
        mHeight = size.height;
        parameters.setPreviewSize(mWidth, mHeight);
        Log.d(TAG, "preview size set to width:" + size.width + " height:" + size.height);
    }


    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        mSurfaceHolder = surfaceHolder;
        mSurfaceHolder.addCallback(this);
    }

    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        mPreviewCallback = previewCallback;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {

    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        //release the camera
        stopPreview();
        //reopen the camera
        startPreview();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPreview();
    }


    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // the data is still sideways at this point, so rotate it first
        ImageUtil.nv21_rotate_to_90(data, yuv, mWidth, mHeight);
        // camera1 hands back a ready-made NV21 frame; camera2/CameraX give you separate Y, U and V planes to assemble yourself
        mPreviewCallback.onPreviewFrame(yuv, camera);
        camera.addCallbackBuffer(buffer);
    }



    public void setOnChangedSizeListener(OnChangedSizeListener listener) {
        mOnChangedSizeListener = listener;
    }

    public interface OnChangedSizeListener {
        void onChanged(int w, int h);
    }
}
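Both classes rely on an ImageUtil helper that the post does not list. Here is a hedged sketch of the two helpers used above: the parameter order follows the call sites, the 90 degree NV21 rotation is the common textbook implementation, and yuvToNv21 assumes the usual YUV_420_888 layout in which the U and V planes are interleaved with a pixel stride of 2. Treat it as a sketch under those assumptions, not the author's original code.

package com.maniu.x264rtmpmaniu.utils;

public class ImageUtil {

    // Merge copies of the Y/U/V planes into one NV21 buffer (Y plane followed by interleaved V/U).
    // Assumes a chroma pixel stride of 2, i.e. every second byte in u[] / v[] is a real sample.
    public static void yuvToNv21(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
        System.arraycopy(y, 0, nv21, 0, y.length);
        int chromaStart = stride * height;   // start of the VU block in the NV21 buffer
        int uIndex = 0, vIndex = 0;
        for (int i = chromaStart; i + 1 < nv21.length && vIndex < v.length && uIndex < u.length; i += 2) {
            nv21[i] = v[vIndex];             // NV21 stores V first...
            nv21[i + 1] = u[uIndex];         // ...then U
            vIndex += 2;                     // skip the interleaved neighbour byte
            uIndex += 2;
        }
    }

    // Rotate an NV21 frame 90 degrees clockwise (portrait fix for the back camera).
    public static void nv21_rotate_to_90(byte[] src, byte[] dst, int width, int height) {
        int ySize = width * height;
        int bufferSize = ySize * 3 / 2;
        // rotate the Y plane: read columns bottom-up, write them out as rows
        int i = 0;
        for (int x = 0; x < width; x++) {
            for (int y = height - 1; y >= 0; y--) {
                dst[i++] = src[y * width + x];
            }
        }
        // rotate the interleaved VU plane, keeping each V/U pair together
        i = bufferSize - 1;
        for (int x = width - 1; x > 0; x -= 2) {
            for (int y = 0; y < height / 2; y++) {
                dst[i--] = src[ySize + y * width + x];
                dst[i--] = src[ySize + y * width + (x - 1)];
            }
        }
    }
}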
