Webrtc添加滤镜

WebRTC 视频帧处理:可以通过自定义 capture 集成第三方滤镜 SDK,也可以直接修改 WebRTC 源码中 OpenGL render 的部分。本文只涉及显示(本地渲染)部分;如果要让滤镜对远端生效,需要修改 VideoFrame,将其封装的数据替换为滤镜处理后的 framebuffer 数据。

!!! The capture must render in Texture mode, not YUV.

关键代码


package org.webrtc.filter;

import android.content.res.Resources;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;
import android.util.SparseArray;

import org.webrtc.GlUtil;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Arrays;


/**
 * Base class for OpenGL ES 2.0 fragment-shader filters.
 *
 * <p>Subclasses implement {@link #onCreate()} (typically by calling
 * {@link #createProgramByAssetsFile(String, String)}) and {@link #onSizeChanged(int, int)}.
 * {@link #draw()} then runs the standard pipeline: clear, use program, upload uniforms,
 * bind the input texture and issue the quad draw call. All methods must be called on a
 * thread with a current GL context.
 */
public abstract class AFilter {

    private static final String TAG = "Filter";

    // Keys for the generic bool/int/float parameter tables (setBool/setInt/setFloat).
    public static final int KEY_OUT = 0x101;
    public static final int KEY_IN = 0x102;
    public static final int KEY_INDEX = 0x201;

    public static boolean DEBUG = true;

    /** Identity matrix, used as the default transform. */
    public static final float[] OM = MatrixUtils.getOriginalMatrix();

    /** GL program handle. */
    protected int mProgram;
    /** Attribute location of the vertex position ("vPosition"). */
    protected int mHPosition;
    /** Attribute location of the texture coordinate ("vCoord"). */
    protected int mHCoord;
    /** Uniform location of the combined transform matrix ("vMatrix"). */
    protected int mHMatrix;
    /** Uniform location of the default texture sampler ("vTexture"). */
    protected int mHTexture;

    protected Resources mRes;

    /** Vertex position buffer. */
    protected FloatBuffer mVerBuffer;
    /** Texture coordinate buffer. */
    protected FloatBuffer mTexBuffer;
    /** Index buffer; not populated by this base class, available for subclasses. */
    protected ShortBuffer mindexBuffer;

    protected int mFlag = 0;

    // Current transform; starts as a copy of the identity matrix.
    private float[] matrix = Arrays.copyOf(OM, 16);

    private int textureType = 0;    // Texture unit offset; 0 means GL_TEXTURE0.
    private int textureId = 0;

    // Full-screen quad positions, ordered for GL_TRIANGLE_STRIP.
    private float pos[] = {
            -1.0f,  1.0f,
            -1.0f, -1.0f,
            1.0f, 1.0f,
            1.0f,  -1.0f,
    };

    // Texture coordinates matching |pos| (flips the image vertically relative to GL convention).
    private float[] coord={
            0.0f, 0.0f,
            0.0f,  1.0f,
            1.0f,  0.0f,
            1.0f, 1.0f,
    };

    private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[]{
            -1.0f, -1.0f, // Bottom left.
            1.0f, -1.0f, // Bottom right.
            -1.0f, 1.0f, // Top left.
            1.0f, 1.0f, // Top right.
    });

    // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
    private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
            GlUtil.createFloatBuffer(new float[]{
                    0.0f, 0.0f, // Bottom left.
                    1.0f, 0.0f, // Bottom right.
                    0.0f, 1.0f, // Top left.
                    1.0f, 1.0f, // Top right.
            });

    // Lazily allocated generic parameter tables keyed by the KEY_* constants.
    private SparseArray<boolean[]> mBools;
    private SparseArray<int[]> mInts;
    private SparseArray<float[]> mFloats;

    public AFilter(Resources mRes) {
        this.mRes = mRes;
        initBuffer();
    }

    /** Creates the GL program; call once with a current GL context. */
    public final void create() {
        onCreate();
    }

    /** Notifies the filter of the output surface size. */
    public final void setSize(int width, int height) {
        onSizeChanged(width, height);
    }

    /** Runs the full draw pipeline for one frame. */
    public void draw() {
        onClear();
        onUseProgram();
        onSetExpandData();
        onBindTexture();
        onDraw();
    }

    /** Sets the transform matrix (the array is referenced, not copied). */
    public void setMatrix(float[] matrix) {
        this.matrix = matrix;
    }

    public float[] getMatrix() {
        return matrix;
    }

    /** Selects the texture unit offset from GL_TEXTURE0. */
    public final void setTextureType(int type) {
        this.textureType = type;
    }

    public final int getTextureType() {
        return textureType;
    }

    public final int getTextureId() {
        return textureId;
    }

    /** Sets the GL texture name sampled by {@link #onBindTexture()}. */
    public final void setTextureId(int textureId) {
        this.textureId = textureId;
    }

    public void setFlag(int flag) {
        this.mFlag = flag;
    }

    public int getFlag() {
        return mFlag;
    }

    public void setFloat(int type, float... params) {
        if (mFloats == null) {
            mFloats = new SparseArray<>();
        }
        mFloats.put(type, params);
    }

    public void setInt(int type, int... params) {
        if (mInts == null) {
            mInts = new SparseArray<>();
        }
        mInts.put(type, params);
    }

    public void setBool(int type, boolean... params) {
        if (mBools == null) {
            mBools = new SparseArray<>();
        }
        mBools.put(type, params);
    }

    /** Returns the stored bool, or false if the key or index is absent. */
    public boolean getBool(int type, int index) {
        if (mBools == null) return false;
        boolean[] values = mBools.get(type);
        return !(values == null || values.length <= index) && values[index];
    }

    /** Returns the stored int, or 0 if the key or index is absent. */
    public int getInt(int type, int index) {
        if (mInts == null) return 0;
        int[] values = mInts.get(type);
        if (values == null || values.length <= index) {
            return 0;
        }
        return values[index];
    }

    /** Returns the stored float, or 0 if the key or index is absent. */
    public float getFloat(int type, int index) {
        if (mFloats == null) return 0;
        float[] values = mFloats.get(type);
        if (values == null || values.length <= index) {
            return 0;
        }
        return values[index];
    }

    /** Output texture of this filter, or -1 if it renders directly to the bound target. */
    public int getOutputTexture() {
        return -1;
    }

    /**
     * Implement to create the GL program, typically via {@link #createProgram} or
     * {@link #createProgramByAssetsFile}.
     */
    protected abstract void onCreate();

    protected abstract void onSizeChanged(int width, int height);

    /** Compiles/links the program and caches the standard attribute/uniform locations. */
    protected final void createProgram(String vertex, String fragment) {
        mProgram = uCreateGlProgram(vertex, fragment);
        mHPosition = GLES20.glGetAttribLocation(mProgram, "vPosition");
        mHCoord = GLES20.glGetAttribLocation(mProgram, "vCoord");
        mHMatrix = GLES20.glGetUniformLocation(mProgram, "vMatrix");
        mHTexture = GLES20.glGetUniformLocation(mProgram, "vTexture");
    }

    /** Loads vertex/fragment shader sources from the app's assets and builds the program. */
    protected final void createProgramByAssetsFile(String vertex, String fragment) {
        createProgram(uRes(mRes, vertex), uRes(mRes, fragment));
    }

    /**
     * Allocates the direct vertex and texture-coordinate buffers.
     * Sizes are derived from the arrays (4 bytes per float) instead of being hard-coded.
     */
    protected void initBuffer() {
        ByteBuffer vertexBytes = ByteBuffer.allocateDirect(pos.length * 4);
        vertexBytes.order(ByteOrder.nativeOrder());
        mVerBuffer = vertexBytes.asFloatBuffer();
        mVerBuffer.put(pos);
        mVerBuffer.position(0);
        ByteBuffer coordBytes = ByteBuffer.allocateDirect(coord.length * 4);
        coordBytes.order(ByteOrder.nativeOrder());
        mTexBuffer = coordBytes.asFloatBuffer();
        mTexBuffer.put(coord);
        mTexBuffer.position(0);
    }

    protected void onUseProgram() {
        GLES20.glUseProgram(mProgram);
    }

    /** Enables the vertex/texture attributes and draws the full-screen quad. */
    protected void onDraw() {
        GLES20.glEnableVertexAttribArray(mHPosition);
        GLES20.glVertexAttribPointer(mHPosition, 2, GLES20.GL_FLOAT, false, 0, mVerBuffer);
        GLES20.glEnableVertexAttribArray(mHCoord);
        GLES20.glVertexAttribPointer(mHCoord, 2, GLES20.GL_FLOAT, false, 0, mTexBuffer);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(mHPosition);
        GLES20.glDisableVertexAttribArray(mHCoord);
    }

    /** Clears the bound target to opaque white. */
    protected void onClear() {
        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    }

    /** Uploads additional uniforms; the base class uploads the transform matrix. */
    protected void onSetExpandData() {
        GLES20.glUniformMatrix4fv(mHMatrix, 1, false, matrix, 0);
    }

    /**
     * Binds the default input texture as GL_TEXTURE_2D on unit GL_TEXTURE0 + textureType.
     * NOTE(review): external (OES) textures would need the GLES11Ext target instead — confirm
     * that inputs to this filter are always 2D textures.
     */
    protected void onBindTexture() {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + textureType);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTextureId());
        GLES20.glUniform1i(mHTexture, textureType);
    }

    public static void glError(int code, Object index) {
        if (DEBUG && code != 0) {
            Log.e(TAG, "glError:" + code + "---" + index);
        }
    }

    /**
     * Loads a text asset (e.g. a shader source) and normalizes CRLF line endings.
     * The whole stream is buffered and decoded once as UTF-8, so multi-byte characters can
     * never be split across read-chunk boundaries (the previous per-chunk decoding could
     * corrupt them). Returns null on any I/O error.
     */
    public static String uRes(Resources mRes, String path) {
        try (InputStream is = mRes.getAssets().open(path)) {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            int read;
            while ((read = is.read(buffer)) != -1) {
                bytes.write(buffer, 0, read);
            }
            return bytes.toString("UTF-8").replaceAll("\\r\\n", "\n");
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Compiles both shaders and links them into a program.
     * Returns 0 on failure. Shader objects are deleted after linking; GL keeps them alive
     * while attached, so this only releases them once the program itself is deleted.
     */
    public static int uCreateGlProgram(String vertexSource, String fragmentSource) {
        int vertex = uLoadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertex == 0) return 0;
        int fragment = uLoadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (fragment == 0) {
            GLES20.glDeleteShader(vertex);
            return 0;
        }
        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertex);
            GLES20.glAttachShader(program, fragment);
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                glError(1, "Could not link program:" + GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        // Flag shader objects for deletion; they are freed with the program.
        GLES20.glDeleteShader(vertex);
        GLES20.glDeleteShader(fragment);
        return program;
    }

    /** Compiles a single shader; returns 0 and logs the info log on failure. */
    public static int uLoadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (0 != shader) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                glError(1, "Could not compile shader:" + shaderType);
                glError(1, "GLES20 Error:" + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }


}

//GrayFilter

//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//

package org.webrtc.filter;

import android.content.res.Resources;

public class GrayFilter extends AFilter {
    public GrayFilter(Resources mRes) {
        super(mRes);
    }

    protected void onCreate() {
        this.createProgramByAssetsFile("shader/base_vertex.sh", "shader/color/gray_fragment.frag");
    }

    protected void onSizeChanged(int width, int height) {
    }
}

webrtc render

/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;



/**
 * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
 * be used as a helper class for rendering on SurfaceViews and TextureViews.
 */
public class EglRenderer implements VideoSink {
    private static final String TAG = "EglRenderer";
    private static final long LOG_INTERVAL_SEC = 4;
    // Last surface size reported via onSurfaceChanged; used to (re)create the export FBO.
    private int width;
    private int height;

    /**
     * Listener receiving Bitmap copies of rendered frames; invoked on the render thread
     * (see addFrameListener), so implementations must be lightweight.
     */
    public interface FrameListener {
        void onFrame(Bitmap frame);
    }

    /**
     * Callback for clients to be notified about errors encountered during rendering.
     */
    public static interface ErrorCallback {
        /**
         * Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering.
         */
        void onGlOutOfMemory();
    }

    /** Immutable holder pairing a FrameListener with its capture parameters. */
    private static class FrameListenerAndParams {
        public final FrameListener listener;
        public final float scale;
        public final RendererCommon.GlDrawer drawer;
        public final boolean applyFpsReduction;

        public FrameListenerAndParams(FrameListener listener, float scale,
                                      RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
            // Field assignments are independent; order is irrelevant.
            this.applyFpsReduction = applyFpsReduction;
            this.drawer = drawer;
            this.scale = scale;
            this.listener = listener;
        }
    }

    /**
     * Runnable that creates the EGL surface on the render thread for the most recently
     * set |surface|. The run() body is elided in this copy of the source.
     */
    private class EglSurfaceCreation implements Runnable {
        private Object surface;

        // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
        @SuppressWarnings("NoSynchronizedMethodCheck")
        public synchronized void setSurface(Object surface) {
            this.surface = surface;
        }

       .....
    }

    /**
     * Handler that triggers a callback when an uncaught exception happens when handling a message.
     */
    private static class HandlerWithExceptionCallback extends Handler {
        private final Runnable exceptionCallback;

        public HandlerWithExceptionCallback(Looper looper, Runnable exceptionCallback) {
            super(looper);
            this.exceptionCallback = exceptionCallback;
        }

        @Override
        public void dispatchMessage(Message msg) {
            try {
                super.dispatchMessage(msg);
            } catch (Exception e) {
                Logging.e(TAG, "Exception on EglRenderer thread", e);
                // Notify first, then rethrow so the thread still dies with the original exception.
                exceptionCallback.run();
                throw e;
            }
        }
    }

    protected final String name;

    // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
    // on |handlerLock|.
    private final Object handlerLock = new Object();
    @Nullable
    private Handler renderThreadHandler;

    // Listeners receiving Bitmap copies of rendered frames; mutated only via render-thread tasks.
    private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();

    private volatile ErrorCallback errorCallback;

    // Variables for fps reduction.
    private final Object fpsReductionLock = new Object();
    // Time for when next frame should be rendered.
    private long nextFrameTimeNs;
    // Minimum duration between frames when fps reduction is active, or Long.MAX_VALUE if video is
    // completely paused (see setFpsReduction).
    private long minRenderPeriodNs;

    // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
    // accessed from the render thread.
    @Nullable
    private EglBase eglBase;
    private final VideoFrameDrawer frameDrawer;
    @Nullable
    private RendererCommon.GlDrawer drawer;
    private boolean usePresentationTimeStamp;
    private final Matrix drawMatrix = new Matrix();

    // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
    private final Object frameLock = new Object();
    @Nullable
    private VideoFrame pendingFrame;

   ....

    // Periodically logs statistics, rescheduling itself while the render thread is alive.
    private final Runnable logStatisticsRunnable = new Runnable() {
        @Override
        public void run() {
            logStatistics();
            synchronized (handlerLock) {
                if (renderThreadHandler != null) {
                    renderThreadHandler.removeCallbacks(logStatisticsRunnable);
                    renderThreadHandler.postDelayed(
                            logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
                }
            }
        }
    };

    private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();


    // Optional post-processing filter applied to texture frames (see renderFrameOnRenderThread).
    private AFilter filter;

    // Off-screen framebuffer + texture the frame is first drawn into, so the filter can sample it
    // and the data can be exported.
    private int[] mExportFrame = new int[1];
    private int[] mExportTexture = new int[1];

    // Deletes the export FBO and texture. NOTE(review): GL objects must be deleted on the thread
    // owning the GL context — confirm callers run this on the render thread.
    private void deleteFrameBuffer() {
        GLES20.glDeleteFramebuffers(1, mExportFrame, 0);
        GLES20.glDeleteTextures(1, mExportTexture, 0);
    }

    /**
     * Standard constructor. The name will be used for the render thread name and included when
     * logging. In order to render something, you must first call init() and createEglSurface.
     */
    public EglRenderer(String name) {
        this(name, new VideoFrameDrawer());
    }

    public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
        this.name = name;
        this.frameDrawer = videoFrameDrawer;

    }

    /** Returns the currently installed post-processing filter, or null. */
    public AFilter getFilter() {
        return filter;
    }

    /**
     * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
     * for drawing frames on the EGLSurface. This class is responsible for calling release() on
     * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
     * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
     * set with the frame timestamps, which specifies desired presentation time and might be useful
     * for e.g. syncing audio and video.
     *
     * <p>NOTE: the tail of this method (EGL context creation) is elided in this copy of the source.
     */
    public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
                     RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
        synchronized (handlerLock) {
            if (renderThreadHandler != null) {
                throw new IllegalStateException(name + "Already initialized");
            }
            logD("Initializing EglRenderer");
            this.drawer = drawer;
            this.usePresentationTimeStamp = usePresentationTimeStamp;

            // Dedicated render thread; the exception callback clears the handler so later calls
            // see the renderer as uninitialized.
            final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
            renderThread.start();
            renderThreadHandler =
                    new HandlerWithExceptionCallback(renderThread.getLooper(), new Runnable() {
                        @Override
                        public void run() {
                            synchronized (handlerLock) {
                                renderThreadHandler = null;
                            }
                        }
                    });
            // Create EGL context on the newly created render thread. It should be possibly to create the
            // context on this thread and make it current on the render thread, but this causes failure on
            // some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
            ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
                // If sharedContext is null, then texture frames are disabled. This is typically for old
                // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
             .....

        }
    }

    /**
     * Installs the post-processing filter.
     * NOTE(review): unsynchronized write read by the render thread; appears intended to be called
     * before createEglSurface (which triggers filter.create()) — confirm intended usage.
     */
    public void setFilter(AFilter aFilter) {
        this.filter = aFilter;
    }

    /**
     * Same as above with usePresentationTimeStamp set to false.
     *
     * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
     */
    public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
                     RendererCommon.GlDrawer drawer) {
        init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
    }

    /**
     * Creates the EGL surface for |surface| on the render thread and, if a filter is installed,
     * creates its GL program there as well.
     */
    public void createEglSurface(Surface surface) {
        createEglSurfaceInternal(surface);
        if (filter != null) {
            logD("filter create");
            // AFilter.create() compiles shaders via GLES20 and therefore needs a current GL
            // context. Post it to the render thread so it runs after the surface-creation
            // runnable instead of on the caller's thread, where no context is current.
            postToRenderThread(filter::create);
        }
    }

    public void createEglSurface(SurfaceTexture surfaceTexture) {
        // NOTE(review): unlike the Surface overload, this path never calls filter.create() —
        // confirm whether filtering is meant to work for SurfaceTexture consumers.
        createEglSurfaceInternal(surfaceTexture);
    }

    // Queues EGL surface creation on the render thread for |surface| (Surface or SurfaceTexture).
    private void createEglSurfaceInternal(Object surface) {
        eglSurfaceCreationRunnable.setSurface(surface);
        postToRenderThread(eglSurfaceCreationRunnable);
    }

    /**
     * Block until any pending frame is returned and all GL resources released, even if an interrupt
     * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
     * should be called before the Activity is destroyed and the EGLContext is still valid. If you
     * don't call this function, the GL resources might leak.
     *
     * <p>NOTE: the remainder of this method is elided in this copy of the source.
     */
    public void release() {
        logD("Releasing.");

        // NOTE(review): these GL calls run on the caller's thread; GL object deletion normally
        // requires the context-owning (render) thread — confirm.
        deleteFrameBuffer();
        EasyGlUtils.unBindFrameBuffer();
        final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
        ...
    }

    /**
     * Reset the statistics logged in logStatistics().
     *
     * @param currentTimeNs start of the new statistics window, in System.nanoTime() units.
     */
    private void resetStatistics(long currentTimeNs) {
        synchronized (statisticsLock) {
            statisticsStartTimeNs = currentTimeNs;
            framesReceived = 0;
            framesDropped = 0;
            framesRendered = 0;
            renderTimeNs = 0;
            renderSwapBufferTimeNs = 0;
        }
    }

    /** Logs the render thread's current stack trace via logW, if the thread exists. */
    public void printStackTrace() {
        synchronized (handlerLock) {
            if (renderThreadHandler == null) {
                // No render thread — nothing to report.
                return;
            }
            final Thread renderThread = renderThreadHandler.getLooper().getThread();
            final StackTraceElement[] stack = renderThread.getStackTrace();
            if (stack.length > 0) {
                logW("EglRenderer stack trace:");
                for (StackTraceElement frame : stack) {
                    logW(frame.toString());
                }
            }
        }
    }

    /**
     * Set if the video stream should be mirrored horizontally or not.
     */
    public void setMirror(final boolean mirror) {
        logD("setMirrorHorizontally: " + mirror);
        synchronized (layoutLock) {
            this.mirrorHorizontally = mirror;
        }
    }

    /**
     * Set if the video stream should be mirrored vertically or not.
     */
    public void setMirrorVertically(final boolean mirrorVertically) {
        logD("setMirrorVertically: " + mirrorVertically);
        synchronized (layoutLock) {
            this.mirrorVertically = mirrorVertically;
        }
    }

    /**
     * Recreates the export framebuffer/texture whenever the surface size changes.
     * NOTE(review): SurfaceHolder callbacks are normally delivered on the UI thread, but these
     * GLES calls need the render thread's current context — confirm which thread invokes this.
     */
    public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (this.width != width || this.height != height) {
            this.width = width;
            this.height = height;
            // Recreate the FrameBuffer and Texture at the new size.
            deleteFrameBuffer();
            GLES20.glGenFramebuffers(1, mExportFrame, 0);
            EasyGlUtils.genTexturesWithParameter(1, mExportTexture, 0, GLES20.GL_RGBA, width, height);
        }
    }

    /**
     * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
     * Set this to 0 to disable cropping.
     */
    public void setLayoutAspectRatio(float layoutAspectRatio) {
        logD("setLayoutAspectRatio: " + layoutAspectRatio);
        synchronized (layoutLock) {
            this.layoutAspectRatio = layoutAspectRatio;
        }
    }

    /**
     * Limit render framerate.
     *
     * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
     *            reduction. Values <= 0 pause rendering entirely.
     */
    public void setFpsReduction(float fps) {
        logD("setFpsReduction: " + fps);
        synchronized (fpsReductionLock) {
            final long previousRenderPeriodNs = minRenderPeriodNs;
            // Long.MAX_VALUE acts as the "paused" sentinel; otherwise convert fps to a period.
            minRenderPeriodNs =
                    (fps <= 0) ? Long.MAX_VALUE : (long) (TimeUnit.SECONDS.toNanos(1) / fps);
            if (previousRenderPeriodNs != minRenderPeriodNs) {
                // Fps reduction changed - reset frame time.
                nextFrameTimeNs = System.nanoTime();
            }
        }
    }

    /** Disables fps reduction entirely (renders every frame). */
    public void disableFpsReduction() {
        setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
    }

    /** Pauses rendering; frames are still received but not drawn. */
    public void pauseVideo() {
        setFpsReduction(0 /* fps */);
    }

    /**
     * Register a callback to be invoked when a new video frame has been received. This version uses
     * the drawer of the EglRenderer that was passed in init.
     *
     * @param listener The callback to be invoked. The callback will be invoked on the render thread.
     *                 It should be lightweight and must not call removeFrameListener.
     * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
     *                 required.
     */
    public void addFrameListener(final FrameListener listener, final float scale) {
        addFrameListener(listener, scale, null, false /* applyFpsReduction */);
    }

    /**
     * Register a callback to be invoked when a new video frame has been received.
     *
     * @param listener The callback to be invoked. The callback will be invoked on the render thread.
     *                 It should be lightweight and must not call removeFrameListener.
     * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
     *                 required.
     * @param drawer   Custom drawer to use for this frame listener or null to use the default one.
     */
    public void addFrameListener(
            final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
        addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
    }

    /**
     * Register a callback to be invoked when a new video frame has been received.
     *
     * @param listener          The callback to be invoked. The callback will be invoked on the render thread.
     *                          It should be lightweight and must not call removeFrameListener.
     * @param scale             The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
     *                          required.
     * @param drawer            Custom drawer to use for this frame listener or null to use the default one.
     * @param applyFpsReduction This callback will not be called for frames that have been dropped by
     *                          FPS reduction.
     */
    public void addFrameListener(final FrameListener listener, final float scale,
                                 @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
        postToRenderThread(() -> {
            // Resolved on the render thread so a null drawerParam picks up the current default.
            final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
            frameListeners.add(
                    new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
        });
    }

    /**
     * Remove any pending callback that was added with addFrameListener. If the callback is not in
     * the queue, nothing happens. It is ensured that callback won't be called after this method
     * returns.
     *
     * @param runnable The callback to remove.
     */
    public void removeFrameListener(final FrameListener listener) {
        final CountDownLatch latch = new CountDownLatch(1);
        synchronized (handlerLock) {
            if (renderThreadHandler == null) {
                return;
            }
            // Blocking on the render thread from itself would deadlock on the latch below.
            if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
                throw new RuntimeException("removeFrameListener must not be called on the render thread.");
            }
            postToRenderThread(() -> {
                latch.countDown();
                final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
                while (iter.hasNext()) {
                    if (iter.next().listener == listener) {
                        iter.remove();
                    }
                }
            });
        }
        // Wait until the removal task has started on the render thread.
        ThreadUtils.awaitUninterruptibly(latch);
    }

    /**
     * Can be set in order to be notified about errors encountered during rendering.
     */
    public void setErrorCallback(ErrorCallback errorCallback) {
        this.errorCallback = errorCallback;
    }

    // VideoSink interface.
    /**
     * Queues |frame| for rendering. Acts as a size-1 queue: an undelivered pending frame is
     * released and counted as dropped. The frame is retained until the render task consumes it.
     */
    @Override
    public void onFrame(VideoFrame frame) {
        synchronized (statisticsLock) {
            ++framesReceived;
        }
        final boolean dropOldFrame;
        synchronized (handlerLock) {
            if (renderThreadHandler == null) {
                logD("Dropping frame - Not initialized or already released.");
                return;
            }
            synchronized (frameLock) {
                dropOldFrame = (pendingFrame != null);
                if (dropOldFrame) {
                    pendingFrame.release();
                }
                pendingFrame = frame;
                pendingFrame.retain();
                renderThreadHandler.post(this::renderFrameOnRenderThread);
            }
        }
        // Statistics updated outside the locks above to keep lock scopes minimal.
        if (dropOldFrame) {
            synchronized (statisticsLock) {
                ++framesDropped;
            }
        }
    }

    /**
     * Release EGL surface. This function will block until the EGL surface is released.
     */
    public void releaseEglSurface(final Runnable completionCallback) {

        // Ensure that the render thread is no longer touching the Surface before returning from this
        // function.
        eglSurfaceCreationRunnable.setSurface(null /* surface */);
        synchronized (handlerLock) {
            if (renderThreadHandler != null) {
                renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
                renderThreadHandler.postAtFrontOfQueue(() -> {
                    if (eglBase != null) {
                        eglBase.detachCurrent();
                        eglBase.releaseSurface();
                    }
                    completionCallback.run();
                });
                return;
            }
        }
        // No render thread: complete immediately on the caller's thread.
        completionCallback.run();
    }

    /**
     * Private helper function to post tasks safely. Silently drops the task if the renderer is
     * not initialized or already released.
     */
    private void postToRenderThread(Runnable runnable) {
        synchronized (handlerLock) {
            if (renderThreadHandler != null) {
                renderThreadHandler.post(runnable);
            }
        }
    }

    // Clears the current EGL surface to the given color and swaps; runs on the render thread.
    private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
        if (eglBase != null && eglBase.hasSurface()) {
            logD("clearSurface");
            GLES20.glClearColor(r, g, b, a);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            eglBase.swapBuffers();
        }
    }

    /**
     * Post a task to clear the surface to a transparent uniform color.
     */
    public void clearImage() {
        clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    }

    /**
     * Post a task to clear the surface to a specific color.
     */
    public void clearImage(final float r, final float g, final float b, final float a) {
        synchronized (handlerLock) {
            if (renderThreadHandler == null) {
                return;
            }
            // Front of queue so the clear is not delayed behind queued frame renders.
            renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
        }
    }

    /**
     * Renders and releases |pendingFrame|.
     */
    private void renderFrameOnRenderThread() {
        // Fetch and render |pendingFrame|.
        final VideoFrame frame;
        synchronized (frameLock) {
            if (pendingFrame == null) {
                return;
            }
            frame = pendingFrame;
            pendingFrame = null;
        }
        if (eglBase == null || !eglBase.hasSurface()) {
            logD("Dropping frame - No surface");
            frame.release();
            return;
        }
        // Check if fps reduction is active.
        final boolean shouldRenderFrame;
        synchronized (fpsReductionLock) {
            if (minRenderPeriodNs == Long.MAX_VALUE) {
                // Rendering is paused.
                shouldRenderFrame = false;
            } else if (minRenderPeriodNs <= 0) {
                // FPS reduction is disabled.
                shouldRenderFrame = true;
            } else {
                final long currentTimeNs = System.nanoTime();
                if (currentTimeNs < nextFrameTimeNs) {
                    logD("Skipping frame rendering - fps reduction is active.");
                    shouldRenderFrame = false;
                } else {
                    nextFrameTimeNs += minRenderPeriodNs;
                    // The time for the next frame should always be in the future.
                    nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
                    shouldRenderFrame = true;
                }
            }
        }

        final long startTimeNs = System.nanoTime();

        final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
        final float drawnAspectRatio;
        synchronized (layoutLock) {
            drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
        }

        final float scaleX;
        final float scaleY;

        if (frameAspectRatio > drawnAspectRatio) {
            scaleX = drawnAspectRatio / frameAspectRatio;
            scaleY = 1f;
        } else {
            scaleX = 1f;
            scaleY = frameAspectRatio / drawnAspectRatio;
        }

        drawMatrix.reset();
        drawMatrix.preTranslate(0.5f, 0.5f);
        drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
        drawMatrix.preScale(scaleX, scaleY);
        drawMatrix.preTranslate(-0.5f, -0.5f);


        final long swapBuffersStartTimeNs = System.nanoTime();
        if (usePresentationTimeStamp) {
            eglBase.swapBuffers(frame.getTimestampNs());
        } else {
            eglBase.swapBuffers();
        }

        try {
            if (shouldRenderFrame) {
                final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
                //fbo
                if(isTextureFrame) {
                    EasyGlUtils.bindFrameTexture(mExportFrame[0], mExportTexture[0]);
                }
                GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
                frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
                        eglBase.surfaceWidth(), eglBase.surfaceHeight());

                //filter
                if(isTextureFrame) {
                    EasyGlUtils.unBindFrameBuffer();
                }

                if (isTextureFrame) {
                    if (filter != null && isTextureFrame) {

//                        Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
//                        finalMatrix.preConcat(renderMatrix);
//                        float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);

                 
                        filter.setTextureId(mExportTexture[0]);
                        filter.draw();
                    }
                } else {
//                  int textureId = ((VideoFrame.TextureBuffer) frame.getBuffer()).getTextureId();
//                  if (filter != null && isTextureFrame) {
//                    filter.onDraw(textureId, glCubeBuffer, glTextureBuffer);
//                  }
                }

//                final long swapBuffersStartTimeNs = System.nanoTime();
//                if (usePresentationTimeStamp) {
//                    eglBase.swapBuffers(frame.getTimestampNs());
//                } else {
//                    eglBase.swapBuffers();
//                }

                final long currentTimeNs = System.nanoTime();
                synchronized (statisticsLock) {
                    ++framesRendered;
                    renderTimeNs += (currentTimeNs - startTimeNs);
                    renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
                }
            }

            notifyCallbacks(frame, shouldRenderFrame);
        } catch (GlUtil.GlOutOfMemoryException e) {
            logE("Error while drawing frame", e);
            final ErrorCallback errorCallback = this.errorCallback;
            if (errorCallback != null) {
                errorCallback.onGlOutOfMemory();
            }
            // Attempt to free up some resources.
            drawer.release();
            frameDrawer.release();
            bitmapTextureFramebuffer.release();
            // Continue here on purpose and retry again for next frame. In worst case, this is a continous
            // problem and no more frames will be drawn.
        } finally {
            frame.release();
        }
    }

   

   ...

 
}

//setFilter

  /**
   * Standard View constructor. In order to render something, you must first call init().
   *
   * <p>Installs a pass-through {@code NoFilter} on the renderer so the filter pipeline is
   * always active; swap it via {@code eglRenderer.setFilter(...)} to apply a real effect
   * (e.g. a gray filter).
   */
  public SurfaceViewRenderer(Context context) {
    super(context);
    this.resourceName = getResourceName();
    eglRenderer = new SurfaceEglRenderer(resourceName);
    // Default filter draws the frame unmodified (identity shader).
    eglRenderer.setFilter(new NoFilter(context.getResources()));
    // Both the view and the renderer need surface lifecycle callbacks;
    // registration order is preserved from upstream WebRTC.
    getHolder().addCallback(this);
    getHolder().addCallback(eglRenderer);
  }

EasyGlUtils (the framebuffer/texture helper class used above)

 /**
     * Applies the default sampling/wrapping state to the currently bound 2D texture:
     * nearest-neighbour minification, linear (weighted-average) magnification, and
     * clamp-to-edge wrapping on both axes (required for non-power-of-two textures
     * on GLES 2.0; coordinates are clamped so texels never blend with a border).
     */
    public static void useTexParameter(){
        // Delegate to the parameterized overload instead of duplicating the four calls.
        useTexParameter(GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE,
                GLES20.GL_NEAREST, GLES20.GL_LINEAR);
    }

    /**
     * Applies explicit sampling/wrapping state to the currently bound 2D texture.
     *
     * @param gl_wrap_s     wrap mode for the S axis, e.g. {@code GLES20.GL_CLAMP_TO_EDGE}
     * @param gl_wrap_t     wrap mode for the T axis
     * @param gl_min_filter minification filter, e.g. {@code GLES20.GL_NEAREST}
     * @param gl_mag_filter magnification filter, e.g. {@code GLES20.GL_LINEAR}
     */
    public static void useTexParameter(int gl_wrap_s,int gl_wrap_t,int gl_min_filter,
                                       int gl_mag_filter){
        // These parameters are integer enums: use glTexParameteri rather than the
        // float variant so the values are not pushed through an int->float conversion.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, gl_wrap_s);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, gl_wrap_t);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, gl_min_filter);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, gl_mag_filter);
    }

    /**
     * Generates {@code size} 2D textures into {@code textures} beginning at index
     * {@code start}, allocates uninitialized {@code width}x{@code height} storage for
     * each with the given format, and applies the default sampling parameters.
     * Leaves no texture bound on return.
     *
     * @param size      number of texture names to create
     * @param textures  destination array for the generated texture ids
     * @param start     first index in {@code textures} to fill
     * @param gl_format internal/pixel format, e.g. {@code GLES20.GL_RGBA}
     * @param width     texture width in pixels
     * @param height    texture height in pixels
     */
    public static void genTexturesWithParameter(int size,int[] textures,int start,
                             int gl_format,int width,int height){
        GLES20.glGenTextures(size, textures, start);
        for (int i = 0; i < size; i++) {
            // Bug fix: glGenTextures wrote the new ids at offset `start`, but the
            // original loop bound textures[i], configuring the wrong slots whenever
            // start != 0. Index from `start` to match the generated range.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[start + i]);
            // Data pointer is null: storage only; contents are produced later by FBO rendering.
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, gl_format, width, height,
                0, gl_format, GLES20.GL_UNSIGNED_BYTE, null);
            useTexParameter();
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    }

    /**
     * Binds {@code frameBufferId} as the active framebuffer and attaches
     * {@code textureId} as its color attachment, so subsequent draw calls render
     * into that texture. Pair with {@link #unBindFrameBuffer()} to restore
     * on-screen rendering.
     */
    public static void bindFrameTexture(int frameBufferId,int textureId){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, textureId, 0);
    }

    /** Restores the default (on-screen) framebuffer as the render target. */
    public static void unBindFrameBuffer(){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0);
    }

// Result screenshot: Gray filter effect

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 9
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 9
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值