EGL是什么?
EGL 是 OpenGL ES 渲染 API 和本地窗口系统(native platform window system)之间的一个中间接口层,主要由系统制造商实现。为了让 OpenGL ES 能够在当前设备上绘制,我们需要 EGL 作为 OpenGL ES 与设备之间的桥梁。因此在 Android 中使用 OpenGL 之前,必须先初始化 EGL 环境。
初始化EGL环境
- 获取EGL实例
egl = (EGL10) EGLContext.getEGL();
- 创建一个eglDisplay
eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
- 创建一个EGL上下文
eglContext = createEGLContext(egl, eglDisplay, eglConfig);
- 创建一个 EGLSurface。eglCreatePbufferSurface 创建的是离屏渲染的 surface;如果需要把内容直接显示到屏幕上,可以改用 eglCreateWindowSurface 创建窗口 surface。
//创建一个离屏刷新的surface
EGLSurface eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
- 将上下文绑定到当前线程
egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)
通过上面的步骤我们就可以使用OpenGL了
// Excerpt (full listing in the complete example below): draws one bitmap
// through the current filter into an offscreen framebuffer, reads the result
// back as a Bitmap and hands it to the registered listener.
private void processImage(Bitmap bitmap) {
int imageTextureID = GLHelper.loadTexture(bitmap);
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, bitmap.getWidth(), bitmap.getHeight());
int targetBuffer = fb[0];
// NOTE(review): the color-attachment texture id is never used afterwards.
int targettextureId = fbt[0];
if (manager.mFilter != null) {
manager.mFilter.onDraw(imageTextureID, targetBuffer,
manager.mVertexPosBuffer, manager.mTexPosBuffer, manager.mOrder);
Bitmap result = GLHelper.frameBufferToBitmap(targetBuffer, bitmap.getWidth(), bitmap.getHeight());
if (manager.mProcessListener != null) {
manager.mProcessListener.processFinish(result);
}
bitmap.recycle();
}
}
完整例子
package com.dong.opencamera.core;
/**
* @author : liaojidong
* @e-mail : liaojidong@yy.com
* @date : 2020-01-20 10:13
* @desc :
*/
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.os.Bundle;
import android.os.Handler;
import android.os.Handler.Callback;
import android.os.HandlerThread;
import android.os.Message;
import android.util.Log;
import com.dong.opencamera.core.filter.IVideoFilter;
import com.dong.opencamera.utils.GLHelper;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.LinkedList;
import java.util.Queue;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import static javax.microedition.khronos.egl.EGL10.EGL_NO_CONTEXT;
import static javax.microedition.khronos.egl.EGL10.EGL_NO_SURFACE;
public class GPUProcessor {
private static final String TAG = GPUProcessor.class.getSimpleName();
private static final String THREAD_NAME = "GLThread-Manger";
private GLThread mGLThread;
private EGLConfigChooser eglConfigChooser;
private EGLSurfaceFactory eglSurfaceFactory;
private int eglContextClientVersion = 2;
private IVideoFilter mFilter;
private OnProcessListener mProcessListener;
private volatile boolean isInit = false;
private Object mGLLock = new Object();
private final Queue<Runnable> runOnProcess;
private FloatBuffer mVertexPosBuffer;
private FloatBuffer mTexPosBuffer;
private ShortBuffer mOrder;
private float[] mPosCoordinate = {
// x y z
-1.0f, 1.0f, 0f, // top left
-1.0f, -1.0f, 0f, // bottom left
1.0f, -1.0f, 0f, // bottom right,s
1.0f, 1.0f, 0f, // top right
};
private static final short[] ORDERS = {
0, 1, 2, // 左下角三角形
2, 3, 0 // 右上角三角形
};
private float[] mTexCoordinateBackRight = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0f,
1.0f, 1.0f
};
/**
 * Registers the callback that receives each filtered bitmap.
 * The callback is invoked on the GL thread.
 *
 * @param listener the result receiver; replaces any previous listener.
 */
public void setProcessListener(OnProcessListener listener) {
    mProcessListener = listener;
}
// Pre-builds the vertex/texture-coordinate buffers and the draw-order buffer
// shared by every subsequent draw call.
public GPUProcessor() {
runOnProcess = new LinkedList<>();
mVertexPosBuffer = GLHelper.convertToFloatBuffer(mPosCoordinate);
mTexPosBuffer = GLHelper.convertToFloatBuffer(mTexCoordinateBackRight);
mOrder = GLHelper.convertToShortBuffer(ORDERS);
}
/** Callback delivering the processed bitmap; called on the GL thread. */
public interface OnProcessListener {
void processFinish(Bitmap result);
}
/**
 * Sets the EGL context client version (default 2). Must be called before the
 * GL thread is created; afterwards checkRenderThreadState() throws
 * IllegalStateException.
 */
public void setEGLContextClientVersion(int version) {
checkRenderThreadState();
eglContextClientVersion = version;
}
/**
 * Queues installation of the filter; the assignment and {@code onInit()}
 * run later on the GL thread, right before the next image is processed.
 */
public void setFilter(IVideoFilter filter) {
    runOnGLThread(() -> {
        mFilter = filter;
        mFilter.onInit();
    });
}
/**
 * Queues a task for the GL thread. Note: the task does NOT run immediately —
 * the queue is drained (see runAll) just before the next image is processed.
 */
public void runOnGLThread(Runnable runnable) {
synchronized (runOnProcess) {
runOnProcess.add(runnable);
}
}
// Drains the queue and runs every pending task while holding its monitor,
// so tasks queued concurrently are either run now or kept for the next drain.
private void runAll(Queue<Runnable> queue) {
    synchronized (queue) {
        Runnable task;
        while ((task = queue.poll()) != null) {
            task.run();
        }
    }
}
// Guards configuration setters that must not run once the GL thread exists.
private void checkRenderThreadState() {
    if (mGLThread == null) {
        return;
    }
    throw new IllegalStateException("setRenderer has already been called for this instance.");
}
/**
 * Lazily creates the GL thread and blocks the caller until the EGL context
 * has been initialized (signalled via mGLLock by EGLHelper.initGL()).
 * Subsequent calls return immediately once isInit is set.
 */
private void init() {
    if (isInit) {
        return;
    }
    if (eglConfigChooser == null) {
        eglConfigChooser = new SimpleEGLConfigChooser(true);
    }
    if (eglSurfaceFactory == null) {
        eglSurfaceFactory = new OffscreenSurfaceFactory();
    }
    mGLThread = new GLThread(THREAD_NAME);
    mGLThread.start();
    mGLThread.init();
    synchronized (mGLLock) {
        boolean interrupted = false;
        while (!isInit) {
            try {
                mGLLock.wait();
            } catch (InterruptedException e) {
                // Remember the interrupt and keep waiting for initialization;
                // re-assert below instead of swallowing it (the original only
                // called printStackTrace, losing the interrupt status).
                interrupted = true;
            }
        }
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Decodes the image at {@code imagePath} on the GL thread and runs the current
 * filter over it; the result is delivered through the OnProcessListener.
 *
 * @param imagePath absolute path of the image file to process.
 * @param imageRate forwarded to the GL thread; currently unused downstream.
 */
public void processImage(String imagePath, int imageRate) {
    // The original decoded the image bounds here (inJustDecodeBounds=true)
    // and then discarded the result — pure wasted file I/O, removed.
    init();
    mGLThread.processImage(imagePath, imageRate);
}
public void release() {
if (mGLThread != null) {
mGLThread.release();
}
}
/**
 * Owns the EGL objects (display, config, context, surface) used by the GL
 * thread, and performs the offscreen rendering of a single bitmap.
 * All methods must be called on the GL thread.
 */
private static class EGLHelper {
// EGL_CONTEXT_CLIENT_VERSION attribute key (not exposed by the EGL10 class).
private int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private GPUProcessor manager;
EGL10 egl;
EGLDisplay eglDisplay;
EGLSurface eglSurface;
EGLConfig eglConfig;
EGLContext eglContext;
EGLHelper(GPUProcessor manager) {
this.manager = manager;
}
// Creates the display and context, then signals manager.mGLLock so the
// caller blocked in GPUProcessor.init() can proceed. No surface is made
// current here — updateSurface() does that per image.
private void initGL() {
Log.i(TAG, "start() tid=" + Thread.currentThread().getId());
/*
 * Get an EGL instance
 */
egl = (EGL10) EGLContext.getEGL();
/*
 * Get to the default display.
 */
eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}
/*
 * We can now initialize EGL for that display
 */
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed");
}
eglConfig = manager.eglConfigChooser.chooseConfig(egl, eglDisplay);
/*
 * Create an EGL context. We want to do this as rarely as we can, because an
 * EGL context is a somewhat heavy object.
 */
eglContext = createEGLContext(egl, eglDisplay, eglConfig);
if (eglContext == null || eglContext == EGL_NO_CONTEXT) {
eglContext = null;
throwEglException("createContext");
}
// Wake up the thread waiting in GPUProcessor.init().
synchronized (manager.mGLLock) {
manager.isInit = true;
manager.mGLLock.notifyAll();
}
}
// Tears down all EGL state. NOTE(review): manager.isInit is never reset
// here, so the processor cannot be re-initialized after release — confirm
// whether that is intended.
private void release() {
if (egl != null && manager.isInit) {
egl.eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT); // unbind EGL from this thread
egl.eglDestroySurface(eglDisplay, eglSurface);
egl.eglDestroyContext(eglDisplay, eglContext); // destroy the context
egl.eglTerminate(eglDisplay); // release the display connection
}
}
// (Re)creates a surface of the given size via the configured factory,
// makes it current and sets the viewport. Any previous surface is unbound
// and destroyed first.
private void updateSurface(int surfaceWidth, int surfaceHeight) {
if (eglSurface != null) {
egl.eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
egl.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = null;
}
Log.i(TAG,
"createContext " + eglContext + " tid=" + Thread.currentThread().getId());
eglSurface = manager.eglSurfaceFactory.createSurface(egl,
eglDisplay, eglConfig, surfaceWidth, surfaceHeight);
if (eglSurface == null) {
throwEglException("createSurface");
}
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
this.throwEglException("eglMakeCurrent failed");
}
GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
}
// Uploads the bitmap as a texture, draws it through the current filter into
// an offscreen framebuffer, reads the pixels back and notifies the listener.
// NOTE(review): GLHelper.loadTexture() already recycles the bitmap, so the
// recycle() below is a second call (harmless, but redundant) — confirm.
private void processImage(Bitmap bitmap) {
int imageTextureID = GLHelper.loadTexture(bitmap);
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, bitmap.getWidth(), bitmap.getHeight());
int targetBuffer = fb[0];
// NOTE(review): the color-attachment texture id is never used afterwards.
int targettextureId = fbt[0];
if (manager.mFilter != null) {
manager.mFilter.onDraw(imageTextureID, targetBuffer,
manager.mVertexPosBuffer, manager.mTexPosBuffer, manager.mOrder);
Bitmap result = GLHelper.frameBufferToBitmap(targetBuffer, bitmap.getWidth(), bitmap.getHeight());
if (manager.mProcessListener != null) {
manager.mProcessListener.processFinish(result);
}
bitmap.recycle();
}
}
// Requests a context for the configured client version (default GLES 2);
// passes no attributes when the version is 0.
private EGLContext createEGLContext(EGL10 egl, EGLDisplay display, EGLConfig config) {
int[] attrib_list = {
EGL_CONTEXT_CLIENT_VERSION, manager.eglContextClientVersion, EGL10.EGL_NONE
};
return egl.eglCreateContext(display, config, EGL_NO_CONTEXT,
manager.eglContextClientVersion != 0 ? attrib_list : null);
}
// Convenience overload using the current EGL error code.
private void throwEglException(String function) {
throwEglException(function, egl.eglGetError());
}
// Logs and throws a RuntimeException carrying the EGL error code.
public static void throwEglException(String function, int error) {
String message = formatEglError(function, error);
Log.e(TAG,
"throwEglException tid=" + Thread.currentThread().getId() + " " + message);
throw new RuntimeException(message);
}
public static String formatEglError(String function, int error) {
return function + " failed: " + error;
}
}
/** Strategy for creating the EGLSurface that the processor renders into. */
public interface EGLSurfaceFactory {
EGLSurface createSurface(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig, int width, int height);
}
/** Creates off-screen (pbuffer) surfaces for headless rendering. */
private class OffscreenSurfaceFactory implements EGLSurfaceFactory {
    @Override
    public EGLSurface createSurface(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig, int width, int height) {
        int[] surfaceAttribs = new int[]{EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
        // Create an off-screen pbuffer surface of the requested size.
        EGLSurface eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
        // EGL reports failure as EGL_NO_SURFACE, not null — the original only
        // checked for null and could hand back an unusable surface.
        if (eglSurface == null || eglSurface == EGL_NO_SURFACE) {
            EGLHelper.throwEglException("createSurface", egl.eglGetError());
        }
        return eglSurface;
    }
}
/**
 * An interface for choosing an EGLConfig configuration from a list of
 * potential configurations.
 * <p>
 * This interface must be implemented by clients wishing to supply their own
 * configuration-selection strategy to GPUProcessor.
 */
public interface EGLConfigChooser {
/**
 * Choose a configuration from the list. Implementors typically
 * implement this method by calling
 * {@link EGL10#eglChooseConfig} and iterating through the results. Please consult the
 * EGL specification available from The Khronos Group to learn how to call eglChooseConfig.
 *
 * @param egl the EGL10 for the current display.
 * @param display the current display.
 * @return the chosen configuration.
 */
EGLConfig chooseConfig(EGL10 egl, EGLDisplay display);
}
/**
 * Base chooser: asks EGL for all configs matching a spec, then delegates the
 * final pick to a subclass (same pattern as Android's GLSurfaceView).
 */
private abstract class BaseConfigChooser implements EGLConfigChooser {
public BaseConfigChooser(int[] configSpec) {
mConfigSpec = filterConfigSpec(configSpec);
}
// Two-step query: the first eglChooseConfig call only counts matches, the
// second fetches that many configs.
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
int[] num_config = new int[1];
if (!egl.eglChooseConfig(display, mConfigSpec, null, 0, num_config)) {
throw new IllegalArgumentException("eglChooseConfig failed");
}
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
EGLConfig[] configs = new EGLConfig[numConfigs];
if (!egl.eglChooseConfig(display, mConfigSpec, configs, numConfigs, num_config)) {
throw new IllegalArgumentException("eglChooseConfig#2 failed");
}
EGLConfig config = chooseConfig(egl, display, configs);
if (config == null) {
throw new IllegalArgumentException("No config chosen");
}
return config;
}
// Subclasses pick one config from the candidates, or return null if none fit.
abstract EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs);
protected int[] mConfigSpec;
// When targeting GLES2, splices EGL_RENDERABLE_TYPE=EGL_OPENGL_ES2_BIT in
// front of the terminating EGL_NONE (assumes configSpec ends with EGL_NONE,
// which all callers in this file guarantee).
private int[] filterConfigSpec(int[] configSpec) {
if (eglContextClientVersion != 2) {
return configSpec;
}
/* We know none of the subclasses define EGL_RENDERABLE_TYPE.
 * And we know the configSpec is well formed.
 */
int len = configSpec.length;
int[] newConfigSpec = new int[len + 2];
System.arraycopy(configSpec, 0, newConfigSpec, 0, len - 1);
newConfigSpec[len - 1] = EGL10.EGL_RENDERABLE_TYPE;
newConfigSpec[len] = 0x0004; /* EGL_OPENGL_ES2_BIT */
newConfigSpec[len + 1] = EGL10.EGL_NONE;
return newConfigSpec;
}
}
/**
 * Choose a configuration with exactly the specified r,g,b,a sizes,
 * and at least the specified depth and stencil sizes.
 */
private class ComponentSizeChooser extends BaseConfigChooser {
public ComponentSizeChooser(int redSize, int greenSize, int blueSize, int alphaSize,
int depthSize, int stencilSize) {
super(new int[]{
EGL10.EGL_RED_SIZE, redSize, EGL10.EGL_GREEN_SIZE, greenSize, EGL10.EGL_BLUE_SIZE,
blueSize, EGL10.EGL_ALPHA_SIZE, alphaSize, EGL10.EGL_DEPTH_SIZE, depthSize,
EGL10.EGL_STENCIL_SIZE, stencilSize, EGL10.EGL_NONE
});
value = new int[1];
this.redSize = redSize;
this.greenSize = greenSize;
this.blueSize = blueSize;
this.alphaSize = alphaSize;
this.depthSize = depthSize;
this.stencilSize = stencilSize;
}
// Depth/stencil are treated as minimums; color channels must match exactly.
// Returns the first acceptable config, or null if none qualifies.
@Override
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
for (EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0);
if ((d >= depthSize) && (s >= stencilSize)) {
int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0);
if ((r == redSize) && (g == greenSize) && (b == blueSize) && (a == alphaSize)) {
return config;
}
}
}
return null;
}
// Reads a single attribute of a config, falling back to defaultValue when the
// query fails.
private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config, int attribute,
int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
return value[0];
}
return defaultValue;
}
// Scratch array reused by findConfigAttrib to avoid per-query allocation.
private int[] value;
// Subclasses can adjust these values:
protected int redSize;
protected int greenSize;
protected int blueSize;
protected int alphaSize;
protected int depthSize;
protected int stencilSize;
}
/**
 * This class will choose a RGB_888 surface with
 * or without a depth buffer.
 */
private class SimpleEGLConfigChooser extends ComponentSizeChooser {
// 8-bit RGB, no alpha; 16-bit depth when requested, no stencil.
public SimpleEGLConfigChooser(boolean withDepthBuffer) {
super(8, 8, 8, 0, withDepthBuffer ? 16 : 0, 0);
}
}
private class GLThread extends HandlerThread implements Callback {
private static final int GL_INIT = 1;
private static final int GL_RELEASE = 2;
private static final int GL_PROCESS_IMG = 3;
private static final int GL_UPDATE_SURFACE = 5;
private static final String IMAGE_PATH = "imagePath";
private static final String IMAGE_RATE = "imageRate";
private Handler mHandler;
private EGLHelper mHelper;
GLThread(String name) {
super(name);
}
public boolean handleMessage(Message msg) {
switch (msg.what) {
case GL_INIT:
mHelper.initGL();
break;
case GL_RELEASE:
this.interrupt();
this.quit();
mHelper.release();
break;
case GL_PROCESS_IMG:
final String imagePath = msg.getData().getString(IMAGE_PATH);
Bitmap bitmap = BitmapFactory.decodeFile(imagePath);
mHelper.updateSurface(bitmap.getWidth(), bitmap.getHeight());
runAll(runOnProcess);
mHelper.processImage(bitmap);
break;
case GL_UPDATE_SURFACE:
mHelper.updateSurface(msg.arg1, msg.arg2);
break;
}
return false;
}
public synchronized void start() {
super.start();
mHelper = new EGLHelper(GPUProcessor.this);
this.mHandler = new Handler(this.getLooper(), this);
}
public void init() {
Message msg = Message.obtain();
msg.what = GL_INIT;
this.mHandler.sendMessage(msg);
}
public void release() {
this.mHandler.sendEmptyMessage(GL_RELEASE);
}
public void updateSurface(int surfaceWidth, int surfaceHeight) {
Message msg = Message.obtain();
msg.arg1 = surfaceWidth;
msg.arg2 = surfaceHeight;
msg.what = GL_UPDATE_SURFACE;
this.mHandler.sendMessage(msg);
}
public void processImage(String imagePath, int imageRate) {
Message msg = Message.obtain();
msg.what = GL_PROCESS_IMG;
Bundle bundle = new Bundle();
bundle.putString(IMAGE_PATH, imagePath);
bundle.putInt(IMAGE_RATE, imageRate);
msg.setData(bundle);
this.mHandler.sendMessage(msg);
}
}
}
package com.dong.opencamera.core.filter;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
/**
* @author : liaojidong
* @e-mail : liaojidong@yy.com
* @date : 2019-12-04 13:44
* @desc :
*/
/**
 * A filter stage that renders an input texture into a target framebuffer.
 * All methods are invoked on the GL thread.
 */
public interface IVideoFilter {
// Called once before first use (queued via runOnGLThread); create GL
// resources here.
void onInit();
// Draws inputTexture into targetFrameBuffer using the supplied vertex and
// texture coordinates and the element draw order.
void onDraw(final int inputTexture,
final int targetFrameBuffer,
final FloatBuffer vertexPosBuffer,
final FloatBuffer texturePosBuffer,
final ShortBuffer order);
// Notifies the filter of the current render-target dimensions.
void onFilterChanged(int surfaceWidth, int surfaceHeight);
}
package com.dong.opencamera.utils;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Environment;
import android.util.Log;
import com.dong.opencamera.core.utils.FileUtils;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.opengles.GL10;
/**
* @author : liaojidong
* @e-mail : liaojidong@yy.com
* @date : 2019-11-19 18:03
* @desc :
*/
public class GLHelper {
private static final String TAG = GLHelper.class.getSimpleName();
/**
 * Creates a GL_TEXTURE_EXTERNAL_OES texture with linear filtering and
 * clamp-to-edge wrapping; the target is left unbound on return.
 *
 * @return the new texture object id.
 */
public static int createOESTextureObject() {
int[] tex = new int[1];
// Generate one texture object.
GLES20.glGenTextures(1, tex, 0);
// Bind it to the external-OES target.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
// Configure filtering and wrapping.
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
return tex[0];
}
/**
 * Allocates an RGBA texture of width x height and a framebuffer with that
 * texture as color attachment 0. The ids are returned via frameBuffer[0] and
 * frameBufferTex[0]; both objects are left unbound on return.
 */
public static void createCamFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
// Texture that will back the framebuffer (no pixel data yet).
GLES20.glGenTextures(1, frameBufferTex, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,
GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// Framebuffer with the texture as its color attachment.
GLES20.glGenFramebuffers(1, frameBuffer, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D,
frameBufferTex[0], 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
checkGlError("createCamFrameBuff");
}
/**
 * Throws if the GL error flag is set, tagging the failing operation.
 *
 * @param op name of the operation just executed, included in the message.
 * @throws RuntimeException when glGetError() reports anything but GL_NO_ERROR.
 */
public static void checkGlError(String op) {
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
        String msg = op + ": glError 0x" + Integer.toHexString(error);
        // Was Log.d — an error that is about to crash the app belongs at
        // error level so it is visible in release logs.
        Log.e(TAG, msg);
        throw new RuntimeException(msg);
    }
}
/**
 * Links a program from two already-compiled shaders. The shader objects are
 * deleted afterwards (the program keeps its own reference once attached).
 *
 * @return the program id, or 0 if linking failed.
 */
public static int buildProgram(int vertexShader, int fragmentShader) {
    // Create an empty OpenGL ES program.
    int mProgram = GLES20.glCreateProgram();
    // Attach the vertex shader.
    GLES20.glAttachShader(mProgram, vertexShader);
    // Attach the fragment shader.
    GLES20.glAttachShader(mProgram, fragmentShader);
    // Link the program.
    GLES20.glLinkProgram(mProgram);
    // Check and report link errors.
    int[] status = new int[1];
    GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, status, 0);
    if (GLES20.GL_FALSE == status[0]) {
        Log.e(TAG, "link program,failed:" + GLES20.glGetProgramInfoLog(mProgram));
        // The original returned the broken program id on failure; delete it
        // and return 0 so callers can detect the error.
        GLES20.glDeleteProgram(mProgram);
        mProgram = 0;
    }
    // Release the shader objects.
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
    return mProgram;
}
/**
 * Decodes a drawable resource (unscaled) and uploads it as a texture.
 *
 * @return the texture id, or 0 if the resource could not be decoded.
 */
public static int loadTexture(Context context, int resourceId) {
    final BitmapFactory.Options options = new BitmapFactory.Options();
    options.inScaled = false; // OpenGL needs the raw, unscaled pixel data
    final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
    if (bitmap == null) {
        // Fixed: the original message was missing the space before "could".
        Log.w(TAG, "ResourceId:" + resourceId + " could not be decoded");
        return 0;
    }
    return loadTexture(bitmap);
}
/**
 * Uploads a bitmap as a GL_TEXTURE_2D with mipmaps.
 * NOTE: the passed-in bitmap is recycled — callers must not use it afterwards.
 *
 * @return the texture object id, or 0 on failure.
 */
public static int loadTexture(Bitmap bitmap) {
    // Reject null input up front. The original generated a texture first and
    // then had to delete it again; it also built a BitmapFactory.Options
    // instance that was never used (dead code, removed).
    if (bitmap == null) {
        return 0;
    }
    // Step 1: create the texture object.
    final int[] textureObjectId = new int[1];
    GLES20.glGenTextures(1, textureObjectId, 0);
    if (textureObjectId[0] == 0) { // 0 means creation failed
        Log.w(TAG, "Could not generate a new Opengl texture object");
        return 0;
    }
    // Step 2: bind the texture.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureObjectId[0]);
    // Step 3: filtering — trilinear for minification, bilinear for magnification.
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // Step 4: upload the pixels and build the mipmap chain.
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
    bitmap.recycle();
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    return textureObjectId[0];
}
/**
 * Compiles a shader of the given type. Compile failures are logged; the
 * (then-invalid) shader id is still returned, preserving the original
 * contract for existing callers.
 */
public static int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    // Attach the source and compile it.
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    // The original never checked the compile status, so shader bugs only
    // surfaced later as blank output. Surface them here in the log.
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == GLES20.GL_FALSE) {
        Log.e(TAG, "compile shader failed:" + GLES20.glGetShaderInfoLog(shader));
    }
    return shader;
}
/**
 * Copies the array into a direct, native-order FloatBuffer positioned at 0,
 * ready to be handed to GL attribute-pointer calls.
 */
public static FloatBuffer convertToFloatBuffer(float[] buffer) {
    ByteBuffer raw = ByteBuffer.allocateDirect(buffer.length * 4);
    raw.order(ByteOrder.nativeOrder());
    FloatBuffer result = raw.asFloatBuffer();
    result.put(buffer);
    result.position(0);
    return result;
}
/**
 * Copies the array into a direct, native-order ShortBuffer positioned at 0,
 * ready to be used as a GL element (draw-order) buffer.
 */
public static ShortBuffer convertToShortBuffer(short[] data) {
    ShortBuffer result = ByteBuffer.allocateDirect(data.length * 2)
            .order(ByteOrder.nativeOrder())
            .asShortBuffer();
    result.put(data);
    result.position(0);
    return result;
}
/**
 * Loads the vertex/fragment GLSL sources from assets, compiles both shaders
 * and links them into a program.
 *
 * @return the linked program id (see buildProgram for the failure behavior).
 */
public static int createProgram(Context context, String vertexFileName, String fragmentFileName) {
// Read the GLSL sources from the app's assets.
String vertexSource = AssetsUtils.readShade(context, vertexFileName);
int vertexShader = GLHelper.loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
String fragmentSource = AssetsUtils.readShade(context, fragmentFileName);
int fragmentShader = GLHelper.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
// Link both shaders into an OpenGL ES program.
return GLHelper.buildProgram(vertexShader, fragmentShader);
}
/**
 * Reads back the pixels of a framebuffer into an ARGB_8888 Bitmap.
 * The framebuffer binding is restored to 0 on exit.
 *
 * @return the bitmap, or null if reading or conversion failed.
 */
public static Bitmap frameBufferToBitmap(int frameBuffer, int width, int height) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
try {
// NOTE(review): IntBuffer.allocate() is a heap (non-direct) buffer; some
// GL bindings require a direct buffer for glReadPixels — verify on target
// devices.
IntBuffer pixBuffer = IntBuffer.allocate(width * height);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixBuffer);
int[] glPixel = pixBuffer.array();
int[] argbPixel = new int[width * height];
// Native helper converts the GL RGBA readback into Bitmap ARGB order
// (presumably also handling the vertical flip — confirm in native-lib).
GLHelper.openGLToBitmapColor(glPixel, argbPixel, width, height);
return Bitmap.createBitmap(argbPixel,
width,
height,
Bitmap.Config.ARGB_8888);
} catch (Exception e) {
e.printStackTrace();
} finally {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
return null;
}
// JNI pixel-format converter implemented in native-lib (see usage in
// frameBufferToBitmap): src is the GL readback, dst receives ARGB pixels.
public static native void openGLToBitmapColor(int[] src,int[] dst, int width,int height);
static {
// Load the JNI library that provides openGLToBitmapColor.
System.loadLibrary("native-lib");
}
}
使用
// Usage example: process two images off-screen through a color filter.
// Renamed the local from `GLManager` to `glManager` (Kotlin locals use
// lowerCamelCase).
val glManager = GPUProcessor()
glManager.setFilter(VideoColorFilter(activity))
glManager.setProcessListener {
    // Persist each processed frame to external storage.
    FileUtils.saveBitmap(it, File(Environment.getExternalStorageDirectory().absolutePath, "${System.currentTimeMillis()}.jpg"))
}
glManager.processImage(Environment.getExternalStorageDirectory().absolutePath + "/12345.jpg", 0)
glManager.processImage(Environment.getExternalStorageDirectory().absolutePath + "/1234.jpg", 0)