This post was copied directly from http://www.cnblogs.com/jayceli/archive/2013/04/08/3008472.html. I hope the original author doesn't mind; it was too good not to save.
A simple FramebufferObject example on the Android platform.
I won't go over the FramebufferObject concept here; see Chapter 10 of the OpenGL ES 2.0 Programming Guide.
Below is an example of rendering through a framebuffer into a texture.
The main flow of the code is:
create a framebuffer, bind it, render the scene into a texture attached to it, switch back to the system-provided framebuffer, and draw again using the texture produced by the first pass.
For convenience, both render passes use the same shader.
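For orientation, here is a minimal sketch of that flow, reduced to the FBO-related calls. It assumes a current OpenGL ES 2.0 context; the class and method names (FboSketch, renderSceneToTexture) and the Runnable callback are my own illustration and do not appear in the original code.

import android.opengl.GLES20;

/** Sketch of the render-to-texture flow described above (illustrative only). */
public class FboSketch
{
    /**
     * First pass: render drawScene into a freshly created texture and return its GL name.
     * The caller then binds that texture for the second pass and deletes it when done.
     */
    public static int renderSceneToTexture(int texWidth, int texHeight, Runnable drawScene)
    {
        final int[] fbo = new int[1];
        final int[] tex = new int[1];

        // 1. Create the framebuffer object and the texture that will receive the rendering.
        GLES20.glGenFramebuffers(1, fbo, 0);
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, texWidth, texHeight,
                0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        // 2. Bind the framebuffer and attach the texture as its color buffer.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                GLES20.GL_TEXTURE_2D, tex[0], 0);

        // 3. Render into the texture, but only if the FBO is complete.
        if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) == GLES20.GL_FRAMEBUFFER_COMPLETE)
        {
            drawScene.run();
        }

        // 4. Switch back to the system-provided framebuffer. Deleting the FBO does not
        //    delete the attached texture, which the second pass can now sample.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLES20.glDeleteFramebuffers(1, fbo, 0);
        return tex[0];
    }
}

Note that the full renderer below also attaches a depth renderbuffer, and it recreates and deletes all of these objects every frame inside onDrawFrame. That keeps the example self-contained, but in a real application you would normally create the FBO once (for example when the surface size is known) and reuse it.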
Here is the renderer code, Test7Renderer.java:
package com.android.jayce.test;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.SystemClock;
import com.android.jayce.test.R;
/**
* This class implements our custom renderer. Note that the GL10 parameter passed in is unused for OpenGL ES 2.0
* renderers -- the static class GLES20 is used instead.
*/
public class Test7Renderer implements GLSurfaceView.Renderer
{
/** Used for debug logs. */
private static final String TAG = "Test7Renderer";
private final Context mActivityContext;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Store our model data in a float buffer. */
private final FloatBuffer mCubePositions;
private final FloatBuffer mCubeColors;
private final FloatBuffer mCubeTextureCoordinates;
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;
/** This will be used to pass in the texture. */
private int mTextureUniformHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model color information. */
private int mColorHandle;
/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Size of the color data in elements. */
private final int mColorDataSize = 4;
/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;
/** This is a handle to our cube shading program. */
private int mProgramHandle;
/** This is a handle to our texture data. */
private int mTextureDataHandle;
/**
* Initialize the model data.
*/
public Test7Renderer(final Context activityContext)
{
mActivityContext = activityContext;
// Define points for a cube.
// X, Y, Z
final float[] cubePositionData =
{
// In OpenGL counter-clockwise winding is default. This means that when we look at a triangle,
// if the points are counter-clockwise we are looking at the "front". If not we are looking at
// the back. OpenGL has an optimization where all back-facing triangles are culled, since they
// usually represent the backside of an object and aren't visible anyways.
// Front face
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
// Right face
1.0f, 1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
1.0f, -1.0f, -1.0f,
1.0f, 1.0f, -1.0f,
// Back face
1.0f, 1.0f, -1.0f,
1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, -1.0f,
1.0f, -1.0f, -1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, -1.0f,
// Left face
-1.0f, 1.0f, -1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, -1.0f, 1.0f,
-1.0f, 1.0f, 1.0f,
// Top face
-1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
1.0f, 1.0f, -1.0f,
// Bottom face
1.0f, -1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
};
// R, G, B, A
final float[] cubeColorData =
{
// Front face (red)
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
// Right face (green)
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
// Back face (blue)
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.0f, 1.0f, 1.0f,
// Left face (yellow)
1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
// Top face (cyan)
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
// Bottom face (magenta)
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f
};
// S, T (or X, Y)
// Texture coordinate data.
// Because images have a Y axis pointing downward (values increase as you move down the image) while
// OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
// What's more is that the texture coordinates are the same for every face.
final float[] cubeTextureCoordinateData =
{
// Front face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
// Right face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
// Back face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
// Left face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
// Top face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
// Bottom face
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f
};
// Initialize the buffers.
mCubePositions = ByteBuffer.allocateDirect(cubePositionData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubePositions.put(cubePositionData).position(0);
mCubeColors = ByteBuffer.allocateDirect(cubeColorData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeColors.put(cubeColorData).position(0);
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
}
protected String getVertexShader(int shader)
{
return ToolsUtil.readTextFileFromRawResource(mActivityContext, shader);
}
protected String getFragmentShader(int shader)
{
return ToolsUtil.readTextFileFromRawResource(mActivityContext, shader);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
// Set the background clear color to black.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Note: glEnable(GL_TEXTURE_2D) is a holdover from OpenGL ES 1 and is not valid in
// OpenGL ES 2, where texturing is controlled entirely by the shaders, so the call is omitted here.
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = -0.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = getVertexShader(R.raw.per_pixel_vertex_shader);
final String fragmentShader = getFragmentShader(R.raw.per_pixel_fragment_shader);
final int vertexShaderHandle = ToolsUtil.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ToolsUtil.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
mProgramHandle = ToolsUtil.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[] {"a_Position", "a_Color", "a_TexCoordinate"});
// Load the texture
mTextureDataHandle = ToolsUtil.loadTexture(mActivityContext, R.drawable.aaa);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
IntBuffer framebuffer = IntBuffer.allocate(1);
IntBuffer depthRenderbuffer = IntBuffer.allocate(1);
IntBuffer texture = IntBuffer.allocate(1);
int texWidth = 480, texHeight = 480;
IntBuffer maxRenderbufferSize = IntBuffer.allocate(1);
GLES20.glGetIntegerv(GLES20.GL_MAX_RENDERBUFFER_SIZE, maxRenderbufferSize);
// check if GL_MAX_RENDERBUFFER_SIZE is >= texWidth and texHeight
if((maxRenderbufferSize.get(0) <= texWidth) ||
(maxRenderbufferSize.get(0) <= texHeight))
{
// cannot use framebuffer objects as we need to create
// a depth buffer as a renderbuffer object
// return with an appropriate error
return;
}
// generate the framebuffer, renderbuffer, and texture object names
GLES20.glGenFramebuffers(1, framebuffer);
GLES20.glGenRenderbuffers(1, depthRenderbuffer);
GLES20.glGenTextures(1, texture);
// bind texture and load the texture mip-level 0
// texels are RGB565
// no texels need to be specified as we are going to draw into
// the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture.get(0));
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, texWidth, texHeight,
0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
// bind renderbuffer and create a 16-bit depth buffer
// width and height of renderbuffer = width and height of
// the texture
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, depthRenderbuffer.get(0));
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16,
texWidth, texHeight);
// bind the framebuffer
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer.get(0));
// specify texture as color attachment
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, texture.get(0), 0);
// specify depth_renderbufer as depth attachment
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
GLES20.GL_RENDERBUFFER, depthRenderbuffer.get(0));
// check for framebuffer complete
int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if(status == GLES20.GL_FRAMEBUFFER_COMPLETE)
{
// render to texture using FBO
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * (2 * (int) time);
GLES20.glUseProgram(mProgramHandle);
// Set program handles for cube drawing.
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Color");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -1.0f, -5.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 1.0f, 1.0f, 0.0f);
drawCube();
// render to window system provided framebuffer
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
time = SystemClock.uptimeMillis() % 10000L;
angleInDegrees = (360.0f / 10000.0f) * ((int) time);
GLES20.glUseProgram(mProgramHandle);
// Set program handles for cube drawing.
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Color");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture.get(0)/*mTextureDataHandle*/);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 1.0f, 1.0f, 0.0f);
drawCube();
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
// cleanup
GLES20.glDeleteRenderbuffers(1, depthRenderbuffer);
GLES20.glDeleteFramebuffers(1, framebuffer);
GLES20.glDeleteTextures(1, texture);
}
/**
* Draws a cube.
*/
private void drawCube()
{
// Pass in the position information
mCubePositions.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, mCubePositions);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
mCubeColors.position(0);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
0, mCubeColors);
GLES20.glEnableVertexAttribArray(mColorHandle);
// Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the cube.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
}
}
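The original post does not show the Activity that hosts this renderer. As a sketch only (the class name Test7Activity is mine, and it assumes the device supports OpenGL ES 2.0), the hookup would look roughly like this:

package com.android.jayce.test;

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

/** Hypothetical host Activity for Test7Renderer (not part of the original post). */
public class Test7Activity extends Activity
{
    private GLSurfaceView mGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        mGLSurfaceView = new GLSurfaceView(this);
        // Request an OpenGL ES 2.0 context; the GLES20 calls in the renderer require it.
        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setRenderer(new Test7Renderer(this));
        setContentView(mGLSurfaceView);
    }

    @Override
    protected void onResume()
    {
        super.onResume();
        // Pause and resume the GL thread along with the Activity.
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause()
    {
        super.onPause();
        mGLSurfaceView.onPause();
    }
}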
There is also a helper class, ToolsUtil.java:
package com.android.jayce.test;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
public class ToolsUtil
{
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if(textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
if(textureHandle[0] == 0)
{
throw new RuntimeException("failed to load texture");
}
return textureHandle[0];
}
/**
* Helper function to compile a shader.
*
* @param shaderType The shader type.
* @param shaderSource The shader source code.
* @return An OpenGL handle to the shader.
*/
public static int compileShader(final int shaderType, final String shaderSource)
{
int shaderHandle = GLES20.glCreateShader(shaderType);
if (shaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(shaderHandle, shaderSource);
// Compile the shader.
GLES20.glCompileShader(shaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(shaderHandle);
shaderHandle = 0;
}
}
if (shaderHandle == 0)
{
throw new RuntimeException("Error creating shader.");
}
return shaderHandle;
}
/**
* Helper function to compile and link a program.
*
* @param vertexShaderHandle An OpenGL handle to an already-compiled vertex shader.
* @param fragmentShaderHandle An OpenGL handle to an already-compiled fragment shader.
* @param attributes Attributes that need to be bound to the program.
* @return An OpenGL handle to the program.
*/
public static int createAndLinkProgram(final int vertexShaderHandle, final int fragmentShaderHandle, final String[] attributes)
{
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
if (attributes != null)
{
final int size = attributes.length;
for (int i = 0; i < size; i++)
{
GLES20.glBindAttribLocation(programHandle, i, attributes[i]);
}
}
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0)
{
throw new RuntimeException("Error creating program.");
}
return programHandle;
}
public static String readTextFileFromRawResource(final Context context,
final int resourceId)
{
final InputStream inputStream = context.getResources().openRawResource(
resourceId);
final InputStreamReader inputStreamReader = new InputStreamReader(
inputStream);
final BufferedReader bufferedReader = new BufferedReader(
inputStreamReader);
String nextLine;
final StringBuilder body = new StringBuilder();
try
{
while ((nextLine = bufferedReader.readLine()) != null)
{
body.append(nextLine);
body.append('\n');
}
}
catch (IOException e)
{
return null;
}
return body.toString();
}
}
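One helper the original ToolsUtil does not include is a GL error check, which is handy when debugging the FBO setup. As an illustrative addition only (the method name checkGlError is mine), something like the following could be added to ToolsUtil, which already imports GLES20 and Log:

/** Hypothetical debugging helper: logs and throws if the GL error flag is set. */
public static void checkGlError(final String operation)
{
    int error;
    while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR)
    {
        Log.e("ToolsUtil", operation + ": glError 0x" + Integer.toHexString(error));
        throw new RuntimeException(operation + ": glError 0x" + Integer.toHexString(error));
    }
}

Calling it right after calls such as glFramebufferTexture2D makes problems like an invalid enum or an unsupported texture format much easier to spot.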
The vertex shader used, per_pixel_vertex_shader.glsl:
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec4 a_Color; // Per-vertex color information we will pass in.
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in.
varying vec3 v_Position; // This will be passed into the fragment shader.
varying vec4 v_Color; // This will be passed into the fragment shader.
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader.
// The entry point for our vertex shader.
void main()
{
// Transform the vertex into eye space.
v_Position = vec3(u_MVMatrix * a_Position);
// Pass through the color.
v_Color = a_Color;
// Pass through the texture coordinate.
v_TexCoordinate = a_TexCoordinate;
// gl_Position is a special variable used to store the final position.
// Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
gl_Position = u_MVPMatrix * a_Position;
}
The fragment shader used, per_pixel_fragment_shader.glsl:
precision mediump float; // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
uniform sampler2D u_Texture; // The input texture.
varying vec3 v_Position; // Interpolated position for this fragment.
varying vec4 v_Color; // This is the color from the vertex shader interpolated across the triangle per fragment.
varying vec2 v_TexCoordinate; // Interpolated texture coordinate per fragment.
// The entry point for our fragment shader.
void main()
{
// Multiply the interpolated vertex color by the texture value to get the final output color.
gl_FragColor = (v_Color * texture2D(u_Texture, v_TexCoordinate));
}
That's all there is to it. As you can see, the texture on each face of the rotating cube is the texture rendered into our own framebuffer, so every face shows its own rotating cube.
Here is a screenshot of the result: