Android Fixed-Frame-Rate Screen Recording

I recently built a fixed-frame-rate screen-recording app, and the process turned me from a complete beginner in this area into someone with a fairly deep understanding of it.

The core of fixed-frame-rate recording is to put our own Surface in front of MediaCodec: the screen is captured into a Surface we create ourselves, and we decide when its contents are copied over to MediaCodec's input Surface, so the encoder only ever sees frames at the rate we choose.
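To make that concrete, here is a minimal sketch of how the pieces below fit together. It only restates the wiring that MyShoter.prepareEncoder() and run() perform further down; the method name setUpFixedFpsPipeline is illustrative and not part of the original code, and error handling and threading are omitted.

// Sketch of the fixed-frame-rate pipeline (see MyShoter below for the real code)
void setUpFixedFpsPipeline(MediaProjection projection, MediaFormat format,
                           int width, int height, int dpi, int fps) throws IOException {
    // 1. H.264 encoder that takes its input from a Surface
    MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface encoderInput = encoder.createInputSurface();

    // 2. Our own Surface (a SurfaceTexture wrapped by EGLRender) sits in between
    EGLRender eglRender = new EGLRender(encoderInput, width, height, fps);

    // 3. The VirtualDisplay renders the screen into *our* Surface, not the encoder's
    projection.createVirtualDisplay("screen", width, height, dpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
            eglRender.getDecodeSurface(), null, null);

    encoder.start();
    // 4. The render loop copies the latest screen texture onto the encoder Surface
    //    once per frame interval, which is what fixes the output frame rate
    eglRender.start();
}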

For company reasons I can't publish the demo project for now, but all of the key code is below and can be copied straight into your own project.

On to the code:

1. The screen-recording class:

public class MyShoter implements Runnable {

    private static final String TAG = MyShoter.class.getSimpleName();

    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int IFRAME_INTERVAL = 10;
    private static final int TIMEOUT_US = 10000;


    /**
     * Recording resolution (width and height, in pixels)
     */
    private int mWidth;
    private int mHeight;

    /**
     * Recording bit rate, in bits per second
     */
    private int mBitRate;

    /**
     * Recording frame rate
     */
    private int mFrameRate;     // e.g. 30 FPS

    /**
     * Virtual display DPI; passing 1 works fine
     */
    private int mDpi;

    /**
     * Path of the output MP4 file
     */
    private String mSaveFilePath;

    private MediaProjection mMediaProjection;
    private MediaCodec mVideoEncoder;
    private Surface mSurface;
    private MediaMuxer mMuxer;

    private boolean mMuxerStarted = false;
    private int mVideoTrackIndex = -1;

    private AtomicBoolean mAtomicQuit = new AtomicBoolean(false);

    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();

    private VirtualDisplay mVirtualDisplay;
    private DisplayManager displayManager;

    public MyShoter(int width, int height, int bitRate, int dpi, int fps,
                    MediaProjection mediaProjection, String filePath) {
        this.mWidth = width;
        this.mHeight = height;
        this.mBitRate = bitRate;
        this.mDpi = dpi;
        this.mFrameRate = fps;
        this.mMediaProjection = mediaProjection;
        if (!filePath.endsWith(".mp4"))
            filePath += ".mp4";
        this.mSaveFilePath = filePath;
    }


    public void stopShot() {
        if (eglRender != null) {
            eglRender.stop();
        }
    }


    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void run() {

        Log.e(TAG, "run: " + mWidth + "   " + mHeight + "  " + mBitRate + "  " + mDpi + "  " +
                mFrameRate);

        try {

            prepareEncoder();

            mMuxer = new MediaMuxer(mSaveFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            Log.e(TAG, "run: mMuxer init");
            int formatWidth = mWidth;
            int formatHeight = mHeight;
            if ((formatWidth & 1) == 1) {
                formatWidth--;
            }
            if ((formatHeight & 1) == 1) {
                formatHeight--;
            }
            if (mMediaProjection != null) {
                mVirtualDisplay = mMediaProjection.createVirtualDisplay("screen", formatWidth, formatHeight, mDpi,
                        DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, eglRender.getDecodeSurface(), null, null);
            } else {
                // Fallback path: note that displayManager is never initialized in this class,
                // so this branch would need a DisplayManager obtained via getSystemService first.
                mVirtualDisplay = displayManager.createVirtualDisplay("screen", formatWidth, formatHeight, mDpi,
                        eglRender.getDecodeSurface(), DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC);
            }
            startRecordScreen();
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException("Muxur:::", e);
        } finally {
            releaseEncoder();
        }
    }


    /**
     * Start recording; blocks until stopShot() is called
     */
    private void startRecordScreen() {
        eglRender.start();
        releaseEncoder();
    }

    private EGLRender eglRender;

    private void prepareEncoder() throws IOException {
        int formatWidth = mWidth;
        int formatHeight = mHeight;
        if ((formatWidth & 1) == 1) {
            formatWidth--;
        }
        if ((formatHeight & 1) == 1) {
            formatHeight--;
        }
        // MIME type and resolution
        MediaFormat format =
                MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, formatWidth, formatHeight);

        // Color format: the encoder's frames come from a Surface
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // Bit rate
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        // Frame rate
        format.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
        // Key-frame (I-frame) interval, in seconds
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        // Create the encoder
        mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
        // Apply the format to the encoder
        mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Input Surface for the encoder. EGLRender draws the screen texture onto it
        // through OpenGL, and MediaCodec automatically encodes whatever is rendered there.
        mSurface = mVideoEncoder.createInputSurface();
        // EGLRender paces the drawing, i.e. it controls the recording frame rate
        eglRender = new EGLRender(mSurface, formatWidth, formatHeight, mFrameRate);
        eglRender.setCallBack(new EGLRender.onFrameCallBack() {
            @Override
            public void onUpdate() {
                startEncode();

            }

            @Override
            public void onCutScreen(Bitmap bitmap) {

            }
        });
        mVideoEncoder.start();
    }

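    /**
     * Drains at most one encoder output buffer and hands it to the muxer.
     * Called once per rendered frame, from EGLRender's onUpdate() callback.
     */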
    private void startEncode() {
        ByteBuffer[] byteBuffers = null;
        if (Build.VERSION.SDK_INT< 21) {
            byteBuffers = mVideoEncoder.getOutputBuffers();
        }
        int index = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            resetOutputFormatNew();
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d("---", "retrieving buffers time out!");
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        } else if (index >= 0) {
            if (Build.VERSION.SDK_INT< 21) {
                encodeToVideoTrackNew(byteBuffers[index]);
            } else {
                encodeToVideoTrackNew(mVideoEncoder.getOutputBuffer(index));
            }
            mVideoEncoder.releaseOutputBuffer(index, false);
        }
    }
    private byte[] sps = null;
    private byte[] pps = null;

    private void encodeToVideoTrackNew(ByteBuffer encodeData) {
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
            mBufferInfo.size = 0;
        }
        if (mBufferInfo.size == 0) {
            Log.d(TAG, "info.size == 0, drop it.");
            encodeData = null;
        } else {
            Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size
                    + ", presentationTimeUs=" + mBufferInfo.presentationTimeUs
                    + ", offset=" + mBufferInfo.offset);
        }
        if (encodeData != null) {
            encodeData.position(mBufferInfo.offset);
            encodeData.limit(mBufferInfo.offset + mBufferInfo.size);
            mMuxer.writeSampleData(mVideoTrackIndex, encodeData, mBufferInfo);
            byte[] bytes;
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                // Prepend the cached SPS/PPS to key frames (useful when streaming raw H.264).
                // Note: this byte[] is not used further here; the MP4 write happened above.
                bytes = new byte[mBufferInfo.size + sps.length + pps.length];
                System.arraycopy(sps, 0, bytes, 0, sps.length);
                System.arraycopy(pps, 0, bytes, sps.length, pps.length);
                encodeData.get(bytes, sps.length + pps.length, mBufferInfo.size);
            } else {
                bytes = new byte[mBufferInfo.size];
                encodeData.get(bytes, 0, mBufferInfo.size);
            }
            Log.e("---", "send:" + mBufferInfo.size +"\tflag:" + mBufferInfo.flags);
        }
    }

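    /**
     * Alternative blocking drain loop. It is not referenced anywhere in the code shown
     * here; the fixed-frame-rate path uses startEncode(), driven by EGLRender's callback.
     */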
    private void shotDisplay() {
        while (!mAtomicQuit.get()) {
            int bufferIndex = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);

            if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                resetOutputFormat();
            } else if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                SystemClock.sleep(10);
            } else if (bufferIndex >= 0) {
                if (!mMuxerStarted) {
                    throw new IllegalStateException("MediaMuxer is  not add the  addTrack format ");
                }

                encodeToVideoTrack(bufferIndex);

                mVideoEncoder.releaseOutputBuffer(bufferIndex, false);
            }
        }
    }

    private void resetOutputFormatNew() {
        if (mMuxerStarted) {
            throw new IllegalStateException("output format already changed!");
        }
        MediaFormat newFormat = mVideoEncoder.getOutputFormat();

        Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
        mVideoTrackIndex = mMuxer.addTrack(newFormat);
        mMuxer.start();
        mMuxerStarted = true;
        Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);

        Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
        getSpsPpsByteBuffer(newFormat);
        Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
    }

    /**
     * Extracts the SPS (csd-0) and PPS (csd-1) from the encoder's output format.
     * Note: getByteBuffer(...).array() assumes the csd buffers are array-backed.
     * @param newFormat the format reported after INFO_OUTPUT_FORMAT_CHANGED
     */
    private void getSpsPpsByteBuffer(MediaFormat newFormat) {
        sps = newFormat.getByteBuffer("csd-0").array();
        pps = newFormat.getByteBuffer("csd-1").array();
        // EventBus.getDefault().post(new EventLogBean("encoder initialized"));
    }

    private void resetOutputFormat() {

        if (mMuxerStarted) {
            throw new IllegalStateException("format changed");
        }
        MediaFormat newFormat = mVideoEncoder.getOutputFormat();

        mVideoTrackIndex = mMuxer.addTrack(newFormat);
        mMuxer.start();
        mMuxerStarted = true;
    }

    private void encodeToVideoTrack(int bufferIndex) {
        ByteBuffer encodeData = mVideoEncoder.getOutputBuffer(bufferIndex);

        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            mBufferInfo.size = 0;
        }

        if (mBufferInfo.size == 0) {
            encodeData = null;
        } else {
            // non-empty sample: it is written to the muxer below
        }

        if (encodeData != null) {
            encodeData.position(mBufferInfo.offset);
            encodeData.limit(mBufferInfo.offset + mBufferInfo.size);

            mMuxer.writeSampleData(mVideoTrackIndex, encodeData, mBufferInfo);
        }


    }

    private void releaseEncoder() {
        if (mVideoEncoder != null) {
            mVideoEncoder.stop();
            mVideoEncoder.release();
            mVideoEncoder = null;
        }

        if (mVirtualDisplay != null) {
            mVirtualDisplay.release();
            mVirtualDisplay = null;
        }

        if (mMediaProjection != null) {
            mMediaProjection.stop();
            mMediaProjection = null;
        }

        if (mMuxer != null) {
            // stop() throws if the muxer was never started, so guard it
            if (mMuxerStarted) {
                mMuxer.stop();
            }
            mMuxer.release();
            mMuxer = null;
            mMuxerStarted = false;
        }

        mBufferInfo = null;
        mVideoTrackIndex = -1;

    }
}
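A quick note on shutdown: stopShot() only flips the flag inside EGLRender. The start() loop then exits, startRecordScreen() returns, and the finally block in run() tears down the encoder, virtual display, projection and muxer, so the MP4 is finalized on the recording thread itself.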

The frame-rate control classes it depends on:

(1) EGLRender

public class EGLRender implements SurfaceTexture.OnFrameAvailableListener {
    private final int HANDLER_PHOTO_CALLBACK = 0;
    private static final String TAG = "EncodeDecodeSurface";
    private static final boolean VERBOSE = false;           // lots of logging

    private STextureRender mTextureRender;
    private SurfaceTexture mSurfaceTexture;

    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLContext mEGLContextEncoder = EGL14.EGL_NO_CONTEXT;
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    private EGLSurface mEGLSurfaceEncoder = EGL14.EGL_NO_SURFACE;

    private Surface decodeSurface;

    private int mWidth;
    private int mHeight;
    private int fps;
    private int video_interval;
    private boolean mFrameAvailable = true;
    private onFrameCallBack callBack;
    private boolean hasCutScreen = false;

    private boolean start;
    private long time = 0;
    private long current_time;

    private Handler handler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what) {
                case HANDLER_PHOTO_CALLBACK:
                    if (callBack != null && msg.obj != null)
                        callBack.onCutScreen((Bitmap) msg.obj);
                    break;
            }
        }
    };
    private ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();

    private class CutScreeenThread implements Runnable {
        private int[] modelData;

        public CutScreeenThread(int[] modelData) {
            this.modelData = modelData;
        }

        @Override
        public void run() {

            int[] ArData = new int[modelData.length];
            int offset1, offset2;
            for (int i = 0; i < mHeight; i++) {
                offset1 = i * mWidth;
                offset2 = (mHeight - i - 1) * mWidth;
                for (int j = 0; j < mWidth; j++) {
                    int texturePixel = modelData[offset1 + j];
                    int blue = (texturePixel >> 16) & 0xff;
                    int red = (texturePixel << 16) & 0x00ff0000;
                    int pixel = (texturePixel & 0xff00ff00) | red | blue;
                    ArData[offset2 + j] = pixel;
                }
            }
            Bitmap bitmap = Bitmap.createBitmap(ArData, mWidth, mHeight, Bitmap.Config.ARGB_8888);
            modelData = null;
            ArData = null;

            handler.obtainMessage(HANDLER_PHOTO_CALLBACK, bitmap).sendToTarget();
        }
    }
    public void setCallBack(onFrameCallBack callBack) {
        this.callBack = callBack;
    }

    public interface onFrameCallBack {
        void onUpdate();
        void onCutScreen(Bitmap bitmap);
    }


    public EGLRender(Surface surface, int mWidth, int mHeight, int fps) {
        this.mWidth = mWidth;
        this.mHeight = mHeight;
        initFPs(fps);
        eglSetup(surface);
        makeCurrent();
        setup();
    }

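    /**
     * video_interval is the minimum wall-clock gap between two encoded frames,
     * in milliseconds (e.g. 1000 / 30 ≈ 33 ms at 30 FPS).
     */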
    private void initFPs(int fps) {
        this.fps = fps;
        video_interval = 1000 / fps;
    }

    /**
     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
     */
    private void eglSetup(Surface surface) {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }

        // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }

        EGLConfig configEncoder = getConfig(2);

        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContext == null) {
            throw new RuntimeException("null context");
        }

        mEGLContextEncoder = EGL14.eglCreateContext(mEGLDisplay, configEncoder, mEGLContext,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContextEncoder == null) {
            throw new RuntimeException("null context2");
        }

        // Create a pbuffer surface.
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, mWidth,
                EGL14.EGL_HEIGHT, mHeight,
                EGL14.EGL_NONE
        };
        mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);


        checkEglError("eglCreatePbufferSurface");
        if (mEGLSurface == null) {
            throw new RuntimeException("surface was null");
        }


        int[] surfaceAttribs2 = {
                EGL14.EGL_NONE
        };
        mEGLSurfaceEncoder = EGL14.eglCreateWindowSurface(mEGLDisplay, configEncoder, surface,
                surfaceAttribs2, 0);   //creates an EGL window surface and returns its handle
        checkEglError("eglCreateWindowSurface");
        if (mEGLSurfaceEncoder == null) {
            throw new RuntimeException("surface was null");
        }
    }

    /**
     * Makes our EGL context and surface current.
     */
    public void makeCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Creates interconnected instances of TextureRender, SurfaceTexture, and Surface.
     */
    private void setup() {
        mTextureRender = new STextureRender(mWidth, mHeight);
        mTextureRender.surfaceCreated();

        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
        mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
        mSurfaceTexture.setOnFrameAvailableListener(this);
        decodeSurface = new Surface(mSurfaceTexture);
    }

    public Surface getDecodeSurface() {
        return decodeSurface;
    }

    private EGLConfig getConfig(int version) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
        if (version >= 3) {
            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
        }

        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
        // doesn't really help.  It can also lead to a huge performance hit on glReadPixels()
        // when reading into a GL_RGBA buffer.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                EGL14.EGL_NONE, 0,      // placeholder for recordable [@-3]
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
            return null;
        }
        return configs[0];
    }

    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }

    public void makeCurrent(int index) {

        if (index == 0) {
            if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
        } else {
            if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurfaceEncoder, mEGLSurfaceEncoder, mEGLContextEncoder)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
        }

    }

    public void setPresentationTime(long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceEncoder, nsecs);
        checkEglError("eglPresentationTimeANDROID");
    }

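    /**
     * Despite the name, this does not block: if a new screen frame has arrived since the
     * last call it is latched into the external texture; otherwise the previously latched
     * frame is re-used by the next drawImage() call, so a static screen still produces output.
     */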
    public void awaitNewImage() {
        if (mFrameAvailable) {
            mFrameAvailable = false;
            mSurfaceTexture.updateTexImage();
        }
    }

    public boolean swapBuffers() {
        boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurfaceEncoder);
        checkEglError("eglSwapBuffers");
        return result;
    }

    private int count = 1;

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        mFrameAvailable = true;
    }

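    /**
     * Maps a frame index to a presentation timestamp in nanoseconds at the fixed frame
     * rate, so the muxed video plays back at exactly `fps` regardless of when each frame
     * was actually captured.
     */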
    private long computePresentationTimeNsec(int frameIndex) {
        final long ONE_BILLION = 1000000000;
        return frameIndex * ONE_BILLION / fps;
    }

    public void drawImage() {
        mTextureRender.drawFrame();
    }

    /**
     * Start the render/encode loop: the latest screen texture is drawn onto the encoder
     * surface at most once per video_interval, which is what fixes the recorded frame rate.
     */
    public void start() {
        start = true;
        while (start) {
            makeCurrent(1);
            awaitNewImage();
            current_time = System.currentTimeMillis();
            if (current_time - time >= video_interval) {
                // Frame-rate control: only render when a full frame interval has elapsed
                drawImage();
                callBack.onUpdate();
                setPresentationTime(computePresentationTimeNsec(count++));
                swapBuffers();
                if (hasCutScreen) {
                    getScreen();
                    hasCutScreen = false;
                }
                time = current_time;
            }
        }
    }


    /**
     * Reads the current frame back with glReadPixels and converts it to a Bitmap on a worker thread
     */
    private void getScreen() {
        IntBuffer buffer = IntBuffer.allocate(mWidth * mHeight);
        buffer.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        int[] modelData = buffer.array();
        buffer.clear();
        singleThreadExecutor.execute(new CutScreeenThread(modelData));
    }
    public void cutScreen() {
        hasCutScreen = true;
    }
    public void stop() {
        start = false;
    }
}
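One note on the pacing loop in start(): it spins on the calling thread and only draws when at least video_interval milliseconds have passed since the previous frame, so the encoder is fed at most fps frames per second. When nothing new has arrived from the screen, the previous texture is simply drawn again, which keeps the timestamps and frame rate of the muxed file steady.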

(2) GlUtil

public class GlUtil {
    public static final String TAG = "EncodeDecodeSurface";

    /** Identity matrix for general use.  Don't modify or life will get weird. */
    public static final float[] IDENTITY_MATRIX;
    static {

        IDENTITY_MATRIX = new float[16];
        Matrix.setIdentityM(IDENTITY_MATRIX, 0);
        //Matrix.scaleM(IDENTITY_MATRIX,0,0.5f,0.5f,1);
    }

    private static final int SIZEOF_FLOAT = 4;


    private GlUtil() {}     // do not instantiate

    /**
     * Creates a new program from the supplied vertex and fragment shaders.
     *
     * @return A handle to the program, or 0 on failure.
     */
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    /**
     * Compiles the provided shader source.
     *
     * @return A handle to the shader, or 0 on failure.
     */
    public static int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Checks to see if a GLES error has been raised.
     */
    public static void checkGlError(String op) {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            String msg = op + ": glError 0x" + Integer.toHexString(error);
            Log.e(TAG, msg);
            throw new RuntimeException(msg);
        }
    }

    /**
     * Checks to see if the location we obtained is valid.  GLES returns -1 if a label
     * could not be found, but does not set the GL error.
     * <p>
     * Throws a RuntimeException if the location is invalid.
     */
    public static void checkLocation(int location, String label) {
        if (location < 0) {
            throw new RuntimeException("Unable to locate '" + label + "' in program");
        }
    }

    /**
     * Creates a texture from raw data.
     *
     * @param data Image data, in a "direct" ByteBuffer.
     * @param width Texture width, in pixels (not bytes).
     * @param height Texture height, in pixels.
     * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
     * @return Handle to texture.
     */
    public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
        int[] textureHandles = new int[1];
        int textureHandle;

        GLES20.glGenTextures(1, textureHandles, 0);
        textureHandle = textureHandles[0];
        GlUtil.checkGlError("glGenTextures");

        // Bind the texture handle to the 2D texture target.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

        // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
        // is smaller or larger than the source image.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GlUtil.checkGlError("loadImageTexture");

        // Load the data from the buffer into the texture handle.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
                width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
        GlUtil.checkGlError("loadImageTexture");

        return textureHandle;
    }

    /**
     * Allocates a direct float buffer, and populates it with the float array data.
     */
    public static FloatBuffer createFloatBuffer(float[] coords) {
        // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
        ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
        bb.order(ByteOrder.nativeOrder());
        FloatBuffer fb = bb.asFloatBuffer();
        fb.put(coords);
        fb.position(0);
        return fb;
    }

    /**
     * Writes GL version info to the log.
     */
    public static void logVersionInfo() {
        Log.i(TAG, "vendor  : " + GLES20.glGetString(GLES20.GL_VENDOR));
        Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
        Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));

        if (false) {
            int[] values = new int[1];
            GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
            int majorVersion = values[0];
            GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
            int minorVersion = values[0];
            if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
                Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
            }
        }
    }
}


(3) STextureRender

public class STextureRender {
    private static final int FLOAT_SIZE_BYTES = 4;
    private static final String TAG = "STextureRendering";


    private static final float FULL_RECTANGLE_COORDS[] = {
            -1.0f, -1.0f,1.0f,   // 0 bottom left
            1.0f, -1.0f,1.0f,   // 1 bottom right
            -1.0f,  1.0f,1.0f,   // 2 top left
            1.0f,  1.0f,1.0f   // 3 top right
    };

    private static final float FULL_RECTANGLE_TEX_COORDS[] = {
            0.0f, 1.0f, 1f,1.0f,    // 0 bottom left
            1.0f, 1.0f,1f,1.0f,     // 1 bottom right
            0.0f, 0.0f, 1f,1.0f,    // 2 top left
            1.0f, 0.0f ,1f,1.0f     // 3 top right
    };

    private static final FloatBuffer FULL_RECTANGLE_BUF =
            GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS);
    private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
            GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS);


    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
                    "uniform mat4 uSTMatrix;\n" +
                    "attribute vec4 aPosition;\n" +
                    "attribute vec4 aTextureCoord;\n" +
                    "varying vec4 vTextureCoord;\n" +
                    "void main() {\n" +
                    "    gl_Position = uMVPMatrix * aPosition;\n" +
                    "    vTextureCoord = uSTMatrix * aTextureCoord;\n" +
                    "}\n";

    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
                    "precision mediump float;\n" +      // highp here doesn't seem to matter
                    "varying vec4 vTextureCoord;\n" +
                    "uniform samplerExternalOES sTexture;\n" +
                    "void main() {\n" +
                    "    gl_FragColor = texture2D(sTexture, vTextureCoord.xy/vTextureCoord.z);" +
                    "}\n";




    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];

    private int mProgram;
    private int mTextureID = -12345;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;
    private int mWidth;
    private int mHeight;

    public STextureRender(int mwidth, int mHeight) {
        this();
        this.mWidth = mwidth;
        this.mHeight = mHeight;
    }

    public STextureRender() {
        Matrix.setIdentityM(mSTMatrix, 0);
    }

    public int getTextureId() {
        return mTextureID;
    }



    /**
     * Initializes GL state.  Call this after the EGL surface has been created and made current.
     */
    public void surfaceCreated() {
        mProgram = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }


        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");


        mTextureID = initTex();
    }
    /**
     * create external texture
     *
     * @return texture ID
     */
    public static int initTex() {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,tex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        return tex[0];
    }


    /**
     * Draws the external texture in SurfaceTexture onto the current EGL surface.
     */
    public void drawFrame() {
        GLES20.glUseProgram(mProgram);

        // Enable the "aPosition" vertex attribute.
        GLES20.glEnableVertexAttribArray(maPositionHandle);

        // Connect vertexBuffer to "aPosition".
        GLES20.glVertexAttribPointer(maPositionHandle, 3,
                GLES20.GL_FLOAT, false, 3*FLOAT_SIZE_BYTES, FULL_RECTANGLE_BUF);

        // Enable the "aTextureCoord" vertex attribute.
        GLES20.glEnableVertexAttribArray(maTextureHandle);

        // Connect texBuffer to "aTextureCoord".
        GLES20.glVertexAttribPointer(maTextureHandle, 4,
                GLES20.GL_FLOAT, false, 4*FLOAT_SIZE_BYTES, FULL_RECTANGLE_TEX_BUF);

        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);


        // Draw the rect.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Done -- disable vertex array, texture, and program.
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(maTextureHandle);
        GLES20.glUseProgram(0);

    }


}

 

2. Calling it from an Activity:

public class Main2Activity extends AppCompatActivity implements View.OnClickListener{
    private MediaProjectionManager mMediaProjectionManager;
    public static MediaProjection mMediaProjection;
    public static final int REQUEST_SDCARD_CODE = 0x1243;
    public static final int REQUEST_CODE = 0x123;

    private MyShoter mShoter;
    // Target frame rate
    private int mFps = 20;
    // Target bit rate
    private int mBitRate = 500000;         // 500 kbps

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main2);
        mMediaProjectionManager =
                (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            // Start recording (or stop it, if already running)
            case R.id.id_shot_video:
                shot();
                break;
        }
    }

    private void shot() {
        if (mShoter != null) {
            mShoter.stopShot();
            mShoter = null;
        } else {
            Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent();
            startActivityForResult(captureIntent, REQUEST_CODE);
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_CODE && resultCode == RESULT_OK) {
            startShotNow(resultCode, data);
        }
    }

    public void startShotNow(int resultCode, Intent data) {
        mMediaProjection =
                mMediaProjectionManager.getMediaProjection(resultCode, data);

        if (mMediaProjection == null) {
            Log.e("motejia", "MediaProjection is null");
            return;
        }

        // Placeholders: substitute your own output file path and recording resolution
        File mFile = new File(/* output file path */);

        mShoter = new MyShoter(/* width */, /* height */, mBitRate, 1,
                mFps, mMediaProjection, mFile.getAbsolutePath());
        new Thread(mShoter).start();

        moveTaskToBack(true);
    }
}
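The file path, width, and height above are left as placeholders. A minimal way to fill them in (a sketch; the variable names here are illustrative and not part of the original code) is to read the real display size from DisplayMetrics and write into the app-specific external files directory, which needs no storage permission:

        DisplayMetrics metrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
        int width = metrics.widthPixels;                 // e.g. 1080
        int height = metrics.heightPixels;               // e.g. 2340
        File outFile = new File(getExternalFilesDir(null), "screen_record.mp4");

        mShoter = new MyShoter(width, height, mBitRate, 1,
                mFps, mMediaProjection, outFile.getAbsolutePath());
        new Thread(mShoter).start();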