package com.example.mytv;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import android.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import static android.media.MediaExtractor.SEEK_TO_CLOSEST_SYNC;
/**
 * Lightweight alternative to {@link android.media.MediaMetadataRetriever} that
 * extracts a single video frame using {@link MediaExtractor} + {@link MediaCodec},
 * converting the decoded flexible-YUV image to an RGB {@link Bitmap} through an
 * NV21/JPEG round trip.
 *
 * <p>Usage: {@code setDataSource(...)} → {@code getFrameAtTime(...)} → {@code release()}.
 * Not thread-safe.
 */
public class XMMediaMetadataRetriever {

    private static final String TAG = "XMMediaMetadataRetriever";

    /** Timeout (microseconds) for dequeueing codec input/output buffers. */
    private static final long TIMEOUT_US = 10_000L;

    // Internal identifiers for the two supported output layouts of
    // getDataFromImage(); these are NOT android.graphics.ImageFormat constants.
    private static final int COLOR_FormatI420 = 1;
    private static final int COLOR_FormatNV21 = 2;

    private MediaExtractor mExtractor = new MediaExtractor();
    /** Format of the selected video track; stays null if no video track exists. */
    private MediaFormat mMediaFormat;

    /**
     * Sets the media source from a content {@link Uri}.
     *
     * @throws IOException if the source cannot be opened.
     */
    public void setDataSource(Context context, Uri uri) throws IOException {
        mExtractor.setDataSource(context, uri, null);
        initExtractor();
    }

    /**
     * Sets the media source from a file path or HTTP(S) URL.
     *
     * @throws IOException if the source cannot be opened.
     */
    public void setDataSource(String filePath) throws IOException {
        mExtractor.setDataSource(filePath);
        initExtractor();
    }

    /**
     * Selects the first video track and requests flexible YUV420 output so
     * {@link MediaCodec#getOutputImage} can be used later.
     * Leaves {@link #mMediaFormat} null when the source has no video track.
     */
    private void initExtractor() {
        int trackIndex = -1;
        int numTracks = mExtractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat mediaFormat = mExtractor.getTrackFormat(i);
            String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
            // Fix: null-guarded startsWith instead of contains(), which could
            // match a non-video mime string that merely embeds "video/".
            if (mime != null && mime.startsWith("video/")) {
                trackIndex = i;
                break;
            }
        }
        if (trackIndex == -1) {
            Log.w(TAG, "No video track found in source");
            return;
        }
        mExtractor.selectTrack(trackIndex);
        mMediaFormat = mExtractor.getTrackFormat(trackIndex);
        mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
    }

    /**
     * Decodes and returns the frame closest to the given time.
     *
     * @param time presentation time in microseconds; the extractor seeks to the
     *             closest sync (key) frame.
     * @return the decoded frame as a {@link Bitmap}, or {@code null} on failure
     *         or when no video track was found.
     */
    public Bitmap getFrameAtTime(long time) {
        if (mMediaFormat == null) {
            // Fix: the original dereferenced mMediaFormat unconditionally and
            // crashed with an NPE when the source had no video track.
            Log.w(TAG, "getFrameAtTime called without a selected video track");
            return null;
        }
        String mime = mMediaFormat.getString(MediaFormat.KEY_MIME);
        MediaCodec decoder;
        try {
            decoder = MediaCodec.createDecoderByType(mime);
        } catch (IOException e) {
            // Fix: the original swallowed this exception and fell through with
            // a null decoder, guaranteeing an NPE on configure().
            Log.e(TAG, "Failed to create decoder for mime " + mime, e);
            return null;
        }
        try {
            decoder.configure(mMediaFormat, null, null, 0);
            decoder.start();
            mExtractor.seekTo(time, SEEK_TO_CLOSEST_SYNC);
            return decodeFirstFrame(decoder);
        } finally {
            // Fix: always release the codec; the original only called stop(),
            // leaking the codec instance on every call.
            decoder.release();
        }
    }

    /**
     * Pumps the decoder until the first renderable frame (or end of stream) and
     * converts it to a Bitmap. Assumes the extractor is already positioned.
     */
    private Bitmap decodeFirstFrame(MediaCodec decoder) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        Bitmap bitmap = null;
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                int inputBufferId = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (inputBufferId >= 0) {
                    ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
                    int sampleSize = mExtractor.readSampleData(inputBuffer, 0);
                    if (sampleSize < 0) {
                        // No more samples: tell the codec the stream is done.
                        decoder.queueInputBuffer(inputBufferId, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        sawInputEOS = true;
                    } else {
                        long presentationTimeUs = mExtractor.getSampleTime();
                        decoder.queueInputBuffer(inputBufferId, 0, sampleSize,
                                presentationTimeUs, 0);
                        // Fix: readSampleData does not advance the extractor;
                        // without advance() the same sample was queued forever.
                        mExtractor.advance();
                    }
                }
            }
            int outputBufferId = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
            if (outputBufferId >= 0) {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
                if (info.size != 0) {
                    Image image = decoder.getOutputImage(outputBufferId);
                    try {
                        bitmap = compressToBitmap(image);
                    } finally {
                        // Fix: Image must be closed, or the codec's image
                        // queue stalls and native memory leaks.
                        image.close();
                    }
                    // First decodable frame is all we need.
                    sawOutputEOS = true;
                }
                // Fix: return the buffer to the codec (original never did).
                decoder.releaseOutputBuffer(outputBufferId, false);
            }
        }
        return bitmap;
    }

    /**
     * Converts a flexible YUV_420_888 {@link Image} to a Bitmap by packing it
     * into an NV21 byte array and JPEG-compressing it ({@link YuvImage} only
     * accepts NV21/YUY2 input).
     */
    private Bitmap compressToBitmap(Image image) {
        Rect rect = image.getCropRect();
        YuvImage yuvImage = new YuvImage(getDataFromImage(image, COLOR_FormatNV21),
                ImageFormat.NV21, rect.width(), rect.height(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        // Quality 100: frame extraction favors fidelity over output size.
        yuvImage.compressToJpeg(rect, 100, stream);
        byte[] jpeg = stream.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }

    /**
     * Flattens the planes of a YUV Image into a single contiguous byte array
     * in either I420 (planar: Y, U, V) or NV21 (Y, then interleaved V/U)
     * layout, honoring each plane's row and pixel strides and the crop rect.
     *
     * @param image       a supported YUV image (see {@link #isImageFormatSupported}).
     * @param colorFormat one of {@link #COLOR_FormatI420} / {@link #COLOR_FormatNV21}.
     * @return a byte array of size width*height*bitsPerPixel/8 for the crop region.
     * @throws IllegalArgumentException for an unsupported target color format.
     * @throws RuntimeException         for an unsupported source image format.
     */
    private byte[] getDataFromImage(Image image, int colorFormat) {
        if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
            throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
        }
        if (!isImageFormatSupported(image)) {
            throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
        }
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        int channelOffset = 0;  // write position in data[] for the current plane
        int outputStride = 1;   // spacing between written samples (2 = interleaved)
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0:
                    // Y plane: always packed at the start.
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1:
                    // U plane: I420 puts it right after Y; NV21 interleaves it
                    // at odd offsets after the V samples (V,U,V,U,...).
                    if (colorFormat == COLOR_FormatI420) {
                        channelOffset = width * height;
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        channelOffset = width * height + 1;
                        outputStride = 2;
                    }
                    break;
                case 2:
                    // V plane: I420 puts it after U (at 1.25*w*h); NV21
                    // interleaves it at even offsets starting right after Y.
                    if (colorFormat == COLOR_FormatI420) {
                        channelOffset = (int) (width * height * 1.25);
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        channelOffset = width * height;
                        outputStride = 2;
                    }
                    break;
            }
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            Log.v(TAG, "pixelStride " + pixelStride);
            Log.v(TAG, "rowStride " + rowStride);
            Log.v(TAG, "width " + width);
            Log.v(TAG, "height " + height);
            Log.v(TAG, "buffer size " + buffer.remaining());
            // Chroma planes are subsampled 2x in both dimensions.
            int shift = (i == 0) ? 0 : 1;
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    // Fast path: plane row is packed and output is packed —
                    // copy the row in one bulk get().
                    length = w;
                    buffer.get(data, channelOffset, length);
                    channelOffset += length;
                } else {
                    // Strided path: copy the row into scratch, then pick every
                    // pixelStride-th sample, writing with the output stride.
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                // Skip row padding, except after the last row (there may be
                // fewer bytes remaining than a full stride).
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            Log.d(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    /** Returns true for the YUV formats getDataFromImage() can flatten. */
    private static boolean isImageFormatSupported(Image image) {
        int format = image.getFormat();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                return true;
        }
        return false;
    }

    /** Releases the underlying extractor. Call exactly once when done. */
    public void release() {
        if (null != mExtractor) mExtractor.release();
    }
}