前段时间拿到需求,需要在视频编辑模块中增加一个视频逆序播放的功能,就是从视频的最后一帧往前播放到第一帧。拿到需求时一脸懵逼,用常规的播放方式根本做不了:编码后的视频是由 I 帧、P 帧、B 帧三种类型的帧组成的,无法直接逐帧倒放。思索良久后发现可以使用视频的原始 YUV 数据来一帧帧绘制完成,废话不多说,直接上代码。
package com.yi.moments.capture;
import android.content.Intent;
import android.graphics.Bitmap;
import android.opengl.GLSurfaceView;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import com.yi.moments.constants.ConstKey;
import com.yi.moments.spore.ImageUtilEngine;
import com.yi.moments.spore.SporeRender;
import com.yi.moments.R;
import com.yi.moments.activity.BaseActivity;
import com.yi.moments.log.YiLog;
import com.yi.moments.util.ImageCompressUtil;
import com.yi.moments.util.ScreenUtil;
import com.yi.moments.util.ToastHelper;
import com.yi.moments.util.YUVUtils;
import com.yi.moments.view.VideoTimeSelectBar;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
/**
 * Plays a raw YUV (NV21-style, 1.5 bytes/pixel) dump frame-by-frame on a GLSurfaceView,
 * supporting forward, reverse and ping-pong ("recycler") playback, plus re-encoding the
 * selected range back to MP4 via {@code VideoEncoderCore}.
 *
 * Frames are addressed by seeking into the flat YUV file: frame i starts at
 * (long) i * sizePreFrame bytes. Reverse playback is therefore just seeking backwards.
 */
public class VideoRecyclerPlayerActivity extends BaseActivity implements View.OnClickListener {
    private static final String TAG = "VideoRecyclerPlayerActivity";
    private static final int PICTURE_COUNT = 9;       // number of preview thumbnails for the select bar
    private static final int VIDEO_PLAY_NORMAL = 0;   // forward playback
    private static final int VIDEO_PLAY_REVERSE = 1;  // reverse playback
    private static final int VIDEO_PLAY_RECYCLER = 2; // ping-pong (forward then backward) playback

    private ImageView btnVideoPlayState;
    private GLSurfaceView mProcessView;
    private FrameLayout mProcessView_Layout;
    private LinearLayout recyclerBackgroundLayout;
    private VideoTimeSelectBar videoTimeSelectBar;
    private int width = 720;      // frame width in pixels (overridable via intent extra)
    private int height = 1280;    // frame height in pixels (overridable via intent extra)
    private int videoPlayOrder;   // one of the VIDEO_PLAY_* constants
    private int sizePreFrame;     // bytes per YUV frame: width * height * 3 / 2
    private int playStartIndex;   // first frame index of the selected range
    private int playEndIndex;     // one past the last frame index of the selected range
    private int frameRate = 15;   // frames per second (overridable via intent extra)
    // Written by VideoComposeTask.doInBackground (worker thread), read by
    // onBackPressed (UI thread) — must be volatile for visibility.
    private volatile boolean isComposeVideo;
    private String sourceDataPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/yuvData.yuv";
    private SporeRender mRender;
    private ImageUtilEngine imageEngine;
    private RecyclerThread recyclerThread;
    private List<Bitmap> backgroundList = new ArrayList<>();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().requestFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_video_recycler_player);
        imageEngine = new ImageUtilEngine();
        btnVideoPlayState = findView(R.id.btnVideoPlayState);
        mProcessView_Layout = findView(R.id.process_view_layout);
        videoTimeSelectBar = findView(R.id.videoTimeSelectBar);
        recyclerBackgroundLayout = findView(R.id.recyclerBackgroundLayout);
        mProcessView = new GLSurfaceView(this);
        mProcessView_Layout.addView(mProcessView, 0);
        mRender = new SporeRender();
        mProcessView.setRenderer(mRender);
        btnVideoPlayState.setOnClickListener(this);
        findView(R.id.imgEditBack).setOnClickListener(this);
        findView(R.id.btnEditFinish).setOnClickListener(this);
        videoTimeSelectBar.setOnSelectTimeLiatener(new VideoTimeSelectBar.OnSelectTimeListener() {
            @Override
            public void onUpdate(int startIndex, int endIndex) {
                playStartIndex = startIndex;
                playEndIndex = endIndex;
                // The playback thread is created lazily in onResume(); guard
                // against a callback arriving before it exists.
                if (recyclerThread != null) {
                    recyclerThread.setFrameIndex(playStartIndex);
                }
            }
        });
        Intent intent = getIntent();
        width = intent.getIntExtra(ConstKey.KEY_YUV_DATA_WIDTH, width);
        height = intent.getIntExtra(ConstKey.KEY_YUV_DATA_HEIGHT, height);
        int mFrameRate = intent.getIntExtra(ConstKey.KEY_YUV_DATA_FRAME_RATE, frameRate);
        if (mFrameRate > 0) {
            frameRate = mFrameRate;
        }
        String mSourceDataPath = intent.getStringExtra(ConstKey.KEY_YUV_DATA_PATH);
        if (!TextUtils.isEmpty(mSourceDataPath)) {
            sourceDataPath = mSourceDataPath;
        }
        sizePreFrame = (int) (width * height * 1.5);
        PictureAsyncTask asyncTask = new PictureAsyncTask();
        asyncTask.execute(sourceDataPath);
    }

    /** Starts the frame-by-frame playback thread. */
    private void showYUV() {
        recyclerThread = new RecyclerThread(sizePreFrame);
        recyclerThread.start();
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (recyclerThread != null) {
            recyclerThread.setSuspend(false);
        } else {
            showYUV();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (recyclerThread != null) {
            // Park the playback thread; resumed in onResume().
            recyclerThread.setSuspend(true);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (recyclerThread != null) {
            recyclerThread.stopThread();
            recyclerThread = null;
        }
        if (backgroundList != null) {
            for (Bitmap bitmap : backgroundList) {
                bitmap.recycle();
            }
            backgroundList.clear();
        }
        // Release the native decoder slightly later so any in-flight decode
        // on the playback thread finishes first.
        doInUI(new Runnable() {
            @Override
            public void run() {
                if (imageEngine != null) {
                    imageEngine.release(0);
                    imageEngine = null;
                }
            }
        }, 100);
    }

    @Override
    public void onBackPressed() {
        // Ignore back while the compose task is writing the output file.
        if (!isComposeVideo) {
            super.onBackPressed();
        }
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.imgEditBack:
                onBackPressed();
                break;
            case R.id.btnEditFinish:
                startComposeVideo();
                break;
            case R.id.btnVideoPlayState:
                updatePlayOrder();
                break;
        }
    }

    /** Kicks off re-encoding of the selected frame range into an MP4 file. */
    private void startComposeVideo() {
        showLoading();
        String outputPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/compose.mp4";
        VideoComposeTask videoComposeTask = new VideoComposeTask();
        videoComposeTask.execute(sourceDataPath, outputPath);
    }

    /** Cycles normal -> reverse -> ping-pong and repositions the playback cursor. */
    private void updatePlayOrder() {
        videoPlayOrder++;
        if (videoPlayOrder > VIDEO_PLAY_RECYCLER) {
            videoPlayOrder = VIDEO_PLAY_NORMAL;
        }
        int maxSecond = recyclerThread.getFrameSize() / frameRate;
        switch (videoPlayOrder) {
            case VIDEO_PLAY_NORMAL:
                btnVideoPlayState.setImageResource(R.drawable.ic_video_play_normal);
                recyclerThread.setFrameIndex(0);
                videoTimeSelectBar.updateMaxSpaceSecond(maxSecond);
                break;
            case VIDEO_PLAY_REVERSE:
                btnVideoPlayState.setImageResource(R.drawable.ic_video_play_reverse);
                recyclerThread.setFrameIndex(recyclerThread.getFrameSize() - 1);
                videoTimeSelectBar.updateMaxSpaceSecond(maxSecond);
                break;
            case VIDEO_PLAY_RECYCLER:
                btnVideoPlayState.setImageResource(R.drawable.ic_video_play_recycler);
                // Ping-pong covers the range twice, so only half the frames fit
                // into the same wall-clock duration.
                playEndIndex = recyclerThread.getFrameSize() / 2;
                recyclerThread.setFrameIndex(0);
                recyclerThread.setReverse(true);
                videoTimeSelectBar.updateMaxSpaceSecond(maxSecond / 2);
                break;
        }
    }

    /** Populates the select-bar background strip with the extracted thumbnails. */
    private void showVideoBackground(List<Bitmap> mList) {
        if (mList == null) {
            return;
        }
        for (Bitmap bitmap : mList) {
            ImageView imageView = new ImageView(this);
            imageView.setScaleType(ImageView.ScaleType.FIT_XY);
            ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(ScreenUtil.screenWidth / 9, ScreenUtil.dip2px(56));
            imageView.setLayoutParams(params);
            imageView.setImageBitmap(bitmap);
            recyclerBackgroundLayout.addView(imageView);
        }
    }

    /**
     * Playback thread: seeks to the current frame in the YUV file, decodes it to RGB
     * via the native engine and hands it to the renderer, then advances the frame
     * index according to the current play order.
     */
    private class RecyclerThread extends Thread {
        private int sizePreFrame;
        // frameIndex/suspend are written from the UI thread and read here.
        private volatile int frameIndex = 0;
        private int frameSize = 0;
        private boolean isReverse = true; // ping-pong direction: true = moving forward
        private volatile boolean suspend = false;
        private volatile boolean stop = false;
        private final Object control = new Object();

        public RecyclerThread(int sizePreFrame) {
            this.sizePreFrame = sizePreFrame;
        }

        public void setSuspend(boolean suspend) {
            // Write the flag BEFORE notifying, otherwise the playback thread can
            // observe the stale value, re-enter wait() and miss the wake-up.
            this.suspend = suspend;
            if (!suspend) {
                synchronized (control) {
                    control.notifyAll();
                }
            }
        }

        public void setReverse(boolean isReverse) {
            this.isReverse = isReverse;
        }

        public void setFrameIndex(int frameIndex) {
            this.frameIndex = frameIndex;
        }

        public int getFrameSize() {
            return frameSize;
        }

        public void stopThread() {
            stop = true;
            // Wake the thread if it is parked in wait(), so it can observe
            // `stop` and exit instead of leaking while suspended.
            synchronized (control) {
                control.notifyAll();
            }
        }

        @Override
        public void run() {
            RandomAccessFile randomAccessFile = null;
            File yuvFile = new File(sourceDataPath);
            try {
                byte[] yuvData = new byte[sizePreFrame];
                // Read-only access is sufficient; "rw" would create/modify the file.
                randomAccessFile = new RandomAccessFile(yuvFile, "r");
                frameSize = (int) (yuvFile.length() / sizePreFrame);
                if (frameSize <= 0) {
                    YiLog.d(TAG, "empty yuv file: " + sourceDataPath);
                    return;
                }
                videoTimeSelectBar.initSelectBar(frameSize, frameRate);
                playStartIndex = 0;
                playEndIndex = frameSize;
                while (!stop) {
                    synchronized (control) {
                        if (suspend) {
                            try {
                                control.wait();
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt();
                            }
                        }
                    }
                    if (stop) {
                        break;
                    }
                    // Cast to long: frameIndex * sizePreFrame overflows int for
                    // clips longer than ~1500 frames at 720x1280.
                    randomAccessFile.seek((long) frameIndex * sizePreFrame);
                    randomAccessFile.read(yuvData);
                    // Native YUV -> RGB conversion (channel 0 = playback decoder).
                    int[] buf = imageEngine.decodeYUV420SP(yuvData, width, height, 0);
                    mRender.update(buf, width, height);
                    if (videoPlayOrder == VIDEO_PLAY_NORMAL) {
                        frameIndex++;
                        if (frameIndex == playEndIndex) {
                            frameIndex = playStartIndex;
                        }
                    } else if (videoPlayOrder == VIDEO_PLAY_REVERSE) {
                        frameIndex--;
                        if (frameIndex < playStartIndex) {
                            frameIndex = playEndIndex - 1;
                        }
                    } else {
                        if (isReverse) {
                            frameIndex++;
                            if (frameIndex == playEndIndex - 1) {
                                isReverse = false;
                            }
                        } else {
                            frameIndex--;
                            if (frameIndex == playStartIndex) {
                                isReverse = true;
                            }
                        }
                    }
                    // Pace playback to the source frame rate; without this the
                    // loop spins as fast as decode allows.
                    try {
                        Thread.sleep(1000L / frameRate);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            } catch (Exception e) {
                YiLog.d(TAG, "Exception:" + e.toString());
            } finally {
                try {
                    if (randomAccessFile != null) {
                        randomAccessFile.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Extracts PICTURE_COUNT evenly spaced frames, decodes and down-scales them
     * into thumbnails for the select bar. params[0] = YUV file path.
     */
    private class PictureAsyncTask extends AsyncTask<String, Void, Boolean> {
        @Override
        protected Boolean doInBackground(String... params) {
            boolean result = false;
            File yuvFile = new File(params[0]);
            RandomAccessFile randomAccessFile = null;
            try {
                byte[] pictureData = new byte[sizePreFrame];
                randomAccessFile = new RandomAccessFile(yuvFile, "r");
                int frameSize = (int) (yuvFile.length() / sizePreFrame);
                // Guard against clips shorter than PICTURE_COUNT frames
                // (spaceSize of 0 would sample frame 0 nine times, which is
                // acceptable; previously it was also an implicit risk for /0-style
                // logic changes).
                int spaceSize = Math.max(1, frameSize / PICTURE_COUNT);
                int index = 0;
                while (index < PICTURE_COUNT) {
                    // long math: index * spaceSize * sizePreFrame overflows int
                    // for large files.
                    randomAccessFile.seek((long) index * spaceSize * sizePreFrame);
                    randomAccessFile.read(pictureData);
                    // Channel 1 = thumbnail decoder, released in finally below.
                    int[] imageBuf = imageEngine.decodeYUV420SP(pictureData, width, height, 1);
                    Bitmap mBitmap = Bitmap.createBitmap(imageBuf, width, height, Bitmap.Config.RGB_565);
                    Bitmap tempBitmap = ImageCompressUtil.compressScale(mBitmap, ScreenUtil.screenWidth / 9, ScreenUtil.dip2px(56));
                    // Free the full-resolution intermediate; only the scaled
                    // thumbnail is kept.
                    if (tempBitmap != mBitmap) {
                        mBitmap.recycle();
                    }
                    backgroundList.add(tempBitmap);
                    index++;
                }
                result = true;
            } catch (Exception e) {
                YiLog.d(TAG, "Exception:" + e.toString());
                e.printStackTrace();
            } finally {
                imageEngine.release(1);
                try {
                    if (randomAccessFile != null) {
                        randomAccessFile.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            return result;
        }

        @Override
        protected void onPostExecute(Boolean result) {
            super.onPostExecute(result);
            if (result) {
                showVideoBackground(backgroundList);
            }
        }
    }

    /**
     * Re-encodes the selected frame range to MP4, walking the frames in the same
     * order the current play mode shows them. params[0] = YUV input path,
     * params[1] = MP4 output path.
     */
    private class VideoComposeTask extends AsyncTask<String, Void, Boolean> {
        @Override
        protected Boolean doInBackground(String... params) {
            int frameIndex = 0;
            isComposeVideo = true;
            boolean isReverse = false;
            boolean result = false;
            boolean isComposeFinish = false;
            File yuvFile = new File(params[0]);
            RandomAccessFile randomAccessFile = null;
            VideoEncoderCore videoEncoderCore = null;
            try {
                byte[] pictureData = new byte[sizePreFrame];
                videoEncoderCore = new VideoEncoderCore(params[1], width, height, frameRate);
                randomAccessFile = new RandomAccessFile(yuvFile, "r");
                // Choose the starting frame according to the play mode.
                if (videoPlayOrder == VIDEO_PLAY_NORMAL) {
                    frameIndex = playStartIndex;
                } else if (videoPlayOrder == VIDEO_PLAY_REVERSE) {
                    frameIndex = playEndIndex - 1;
                } else {
                    isReverse = true; // ping-pong: forward leg first
                    frameIndex = playStartIndex;
                }
                while (!isComposeFinish) {
                    // long math to avoid int overflow on large files.
                    randomAccessFile.seek((long) frameIndex * sizePreFrame);
                    randomAccessFile.read(pictureData);
                    // Reorder chroma in place for the encoder's expected layout.
                    YUVUtils.nv21To420SP(pictureData, width, height);
                    ByteBuffer buf = ByteBuffer.wrap(pictureData);
                    boolean isEncoder = videoEncoderCore.encode(buf, pictureData.length, videoEncoderCore.getPTSUs(frameRate), isComposeFinish);
                    if (isEncoder) {
                        videoEncoderCore.drainEncoder();
                    }
                    if (videoPlayOrder == VIDEO_PLAY_NORMAL) {
                        frameIndex++;
                        if (frameIndex == playEndIndex) {
                            isComposeFinish = true;
                        }
                    } else if (videoPlayOrder == VIDEO_PLAY_REVERSE) {
                        frameIndex--;
                        if (frameIndex < playStartIndex) {
                            isComposeFinish = true;
                        }
                    } else {
                        if (isReverse) {
                            frameIndex++;
                            if (frameIndex == playEndIndex - 1) {
                                isReverse = false;
                            }
                        } else {
                            frameIndex--;
                            if (frameIndex < playStartIndex) {
                                isComposeFinish = true;
                            }
                        }
                    }
                }
                result = true;
            } catch (Exception e) {
                YiLog.d(TAG, "Exception:" + e.toString());
                e.printStackTrace();
            } finally {
                try {
                    if (videoEncoderCore != null) {
                        videoEncoderCore.release();
                    }
                    if (randomAccessFile != null) {
                        randomAccessFile.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            isComposeVideo = false;
            return result;
        }

        @Override
        protected void onPostExecute(Boolean result) {
            super.onPostExecute(result);
            dismissLoading();
            YiLog.d(TAG, "VideoComposeTask result:" + result);
            if (result) {
                ToastHelper.showLongMessage("compose success");
            } else {
                ToastHelper.showLongMessage("compose fail");
            }
        }
    }
}
/**
 * Name : Texture2D.java
 * Copyright : Copyright (c) Tencent Inc. All rights reserved.
 * Description : OpenGL ES 1.0 helper that uploads bitmap or raw pixel data as a 2D texture and draws textured quads.
 */
package com.yi.moments.spore;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.opengles.GL10;
/**
* @author ianmao
*
*/
/**
 * Wraps a single OpenGL ES 1.0 2D texture: {@link #bind} uploads pixel data
 * (bitmap, byte luminance, or int luminance) and leaves the texture bound;
 * {@link #draw} renders two textured quads; {@link #delete} frees GL and
 * bitmap resources. Intended to be driven from the GL thread only.
 */
public class Texture2D {
    long mTime = System.currentTimeMillis();
    private Bitmap mBitmap = null;
    private int textureId = 0;
    private int[] mTextures = new int[1];

    /** Deletes the GL texture (if any) and frees the internal bitmap copy. */
    public void delete(GL10 gl) {
        if (textureId != 0) {
            gl.glDeleteTextures(1, new int[]{textureId}, 0);
            textureId = 0;
        }
        if (mBitmap != null) {
            // Fix: the original condition was inverted (`if (isRecycled()) recycle()`),
            // which only ever "recycled" already-recycled bitmaps — i.e. it never
            // freed anything. Recycle only live bitmaps.
            if (!mBitmap.isRecycled()) {
                mBitmap.recycle();
            }
            mBitmap = null;
        }
    }

    /** Creates a fresh GL texture and shared filter parameters; leaves it bound. */
    private void createTexture(GL10 gl) {
        gl.glGenTextures(1, mTextures, 0);
        textureId = mTextures[0];
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
                GL10.GL_LINEAR);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
                GL10.GL_LINEAR);
    }

    /** Deletes any stale texture so a fresh one can be created unconditionally. */
    private void deleteStaleTexture(GL10 gl) {
        // Fix: the original overloads put texture creation in an `else` branch,
        // so a call that found a stale texture deleted it and then bound texture
        // id 0 without uploading anything. Delete, then always recreate.
        if (textureId != 0) {
            gl.glDeleteTextures(1, mTextures, 0);
            textureId = 0;
        }
    }

    /**
     * Uploads {@code bmp} as the current texture. The bitmap is copied first so
     * the caller may recycle/reuse its bitmap immediately.
     */
    public void bind(GL10 gl, Bitmap bmp) {
        Bitmap bitmap = Bitmap.createBitmap(bmp.getWidth(), bmp.getHeight(), bmp.hasAlpha() ? Bitmap.Config.ARGB_8888
                : Bitmap.Config.RGB_565);
        Canvas canvas = new Canvas(bitmap);
        canvas.drawBitmap(bmp, 0, 0, null);
        mBitmap = bitmap;
        deleteStaleTexture(gl);
        createTexture(gl);
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, mBitmap, 0);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
    }

    /** Uploads raw bytes as a luminance (grayscale) texture of the given size. */
    public void bind(GL10 gl, byte[] data, int width, int height) {
        deleteStaleTexture(gl);
        createTexture(gl);
        gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_LUMINANCE, width, height, 0, GL10.GL_LUMINANCE, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(data));
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
    }

    /** Uploads raw ints as a luminance texture of the given size. */
    public void bind(GL10 gl, int[] data, int width, int height) {
        deleteStaleTexture(gl);
        createTexture(gl);
        gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_LUMINANCE, width, height, 0, GL10.GL_LUMINANCE, GL10.GL_UNSIGNED_BYTE, IntBuffer.wrap(data));
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
    }

    // NOTE(review): shared static scratch buffer — floatToBuffer is not
    // thread-safe and each call overwrites the previous buffer reference.
    public static FloatBuffer mBuffer;

    /** Copies {@code a} into a freshly allocated direct FloatBuffer, rewound to 0. */
    public static FloatBuffer floatToBuffer(float[] a) {
        ByteBuffer mbb = ByteBuffer.allocateDirect(a.length * 4);
        mbb.order(ByteOrder.nativeOrder());
        mBuffer = mbb.asFloatBuffer();
        mBuffer.put(a);
        mBuffer.position(0);
        return mBuffer;
    }

    /**
     * Draws two textured triangle strips with the currently bound texture:
     * a small quarter-size quad and a near-fullscreen quad.
     * The {@code x}/{@code y} parameters are currently unused.
     */
    public void draw(GL10 gl, float x, float y) {
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glEnable(GL10.GL_CULL_FACE);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        mTime = System.currentTimeMillis();
        // Small quad in the upper-left quadrant.
        float[] f1 = new float[]{
                -0.5f, 0f,
                0f, 0f,
                -0.5f, 0.5f,
                0f, 0.5f,
        };
        FloatBuffer verticleBuffer = floatToBuffer(f1);
        // Texture coordinates flipped vertically (bitmap origin is top-left).
        FloatBuffer coordBuffer = floatToBuffer(new float[]{
                0f, 1f,
                1f, 1f,
                0f, 0f,
                1f, 0,
        });
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, coordBuffer);
        gl.glVertexPointer(2, GL10.GL_FLOAT, 0, verticleBuffer);
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
        // Near-fullscreen quad (slightly overscanned on the right/bottom).
        float[] f2 = new float[]{
                -1f, -1.2f,
                1.2f, -1.2f,
                -1f, 1f,
                1.2f, 1f,
        };
        FloatBuffer verticleBuffer1 = floatToBuffer(f2);
        gl.glVertexPointer(2, GL10.GL_FLOAT, 0, verticleBuffer1);
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glDisable(GL10.GL_CULL_FACE);
        gl.glDisable(GL10.GL_TEXTURE_2D);
    }
}
/**
 * Name : SporeRender.java
 * Copyright : Copyright (c) Tencent Inc. All rights reserved.
 * Description : GLSurfaceView renderer that draws the most recently supplied frame bitmap via Texture2D.
 */
package com.yi.moments.spore;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
 * GLSurfaceView.Renderer that displays the latest frame handed to {@link #update}.
 * {@code update} is called from a producer thread (the playback thread) while
 * {@code onDrawFrame} runs on the GL thread, so the shared bitmap reference is
 * volatile and snapshotted before use.
 */
public class SporeRender implements GLSurfaceView.Renderer {
    // Written by update() on a producer thread, read by onDrawFrame() on the
    // GL thread — volatile guarantees the new frame becomes visible.
    private volatile Bitmap mBitmap;
    private Texture2D mTexture2d;

    public SporeRender() {
        mTexture2d = new Texture2D();
    }

    /** Publishes a new frame from raw RGB565-compatible pixel data. */
    public void update(int[] data, int width, int height) {
        mBitmap = Bitmap.createBitmap(data, width, height, Config.RGB_565);
    }

    /** Publishes a new frame bitmap directly. */
    public void update(Bitmap bmp) {
        mBitmap = bmp;
    }

    public void onDrawFrame(GL10 gl) {
        // Snapshot the reference: update() may replace mBitmap mid-frame.
        Bitmap frame = mBitmap;
        if (frame == null) {
            return;
        }
        gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glLoadIdentity();
        gl.glTranslatef(0f, 0f, 0f);
        // bind() copies the bitmap, so the texture is released every frame.
        mTexture2d.bind(gl, frame);
        mTexture2d.draw(gl, 0, 0);
        mTexture2d.delete(gl);
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        gl.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        gl.glClearDepthf(1.0f);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
        gl.glEnable(GL10.GL_CULL_FACE);
        gl.glShadeModel(GL10.GL_SMOOTH);
    }
}