学习目标:
Android 音视频:MediaCodec 解析视频。学习内容:
我们解析视频的时候,可以使用 Android 提供的 API——MediaPlayer,
实现简单的播放、暂停;但是当我们需要提取视频中的一帧或者编辑视频时,就很难实现了。所以我们需要另一个 API——MediaCodec(硬解,控制 DSP 芯片),或者 ffmpeg(软解,耗时)。选择时,硬解的优先级一定要高于软解。
从上图我们可以知道,左边是输入端,有几个空的 buffer 是给我们放传过来的数据的(YUV);右边则是 DSP 芯片解码出来的数据。注意,这两个区域我们需要在数据使用过后及时释放,不然会造成 buffer 全部被占满,导致 DSP 芯片卡死的现象。
接下来我们就来实现。
- 读取一个H264文件渲染到surface.
- 读取一个H264文件其中的一帧为图片
public class MainActivity extends AppCompatActivity {
    private H264Player h264Player;
    private File videoFile;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Expects a raw H.264 elementary stream named "out.h264" in the
        // external-storage root (pushed there beforehand).
        videoFile = new File(Environment.getExternalStorageDirectory(), "out.h264");
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        // Bug fix: the super call was missing; without it, fragments hosted by
        // this activity never receive their permission callbacks.
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == 1) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Storage permission granted: attach the surface and start playback.
                initSurfaceView();
            } else {
                // Permission denied; a rationale should be shown to the user here.
            }
        }
    }

    /** Creates a full-screen SurfaceView and starts the decoder once the surface exists. */
    private void initSurfaceView() {
        SurfaceView surfaceView = new SurfaceView(this);
        LinearLayout linearLayout = findViewById(R.id.content);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        linearLayout.addView(surfaceView, params);
        SurfaceHolder holder = surfaceView.getHolder();
        holder.addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // Render the H.264 stream onto the freshly created surface.
                h264Player = new H264Player(MainActivity.this, videoFile.getAbsolutePath(), holder.getSurface());
                // Alternative: decode a single frame to an image file instead.
                // h264Player = new H264PlayerImage(MainActivity.this, videoFile.getAbsolutePath());
                h264Player.play();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
            }
        });
    }

    /** onClick handler: requests storage permission if needed, then starts playback. */
    public void play(View view) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && checkSelfPermission(
                Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(new String[]{
                    Manifest.permission.READ_EXTERNAL_STORAGE,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE
            }, 1);
        } else {
            initSurfaceView();
        }
    }
}
读取一个H264文件渲染到surface.
/**
 * Plays a raw H.264 elementary stream onto a Surface using MediaCodec
 * (hardware decode). Splits the byte stream on 00 00 00 01 start codes and
 * feeds one NAL unit per input buffer.
 */
public class H264Player implements Runnable {
    private static final String TAG = "H264_PLAYER";
    private String videoPath;
    private Surface surface;
    private MediaCodec mediaCodec;
    private Context context;

    public H264Player(Context context, String videoPath, Surface surface) {
        this.videoPath = videoPath;
        this.surface = surface;
        this.context = context;
        try {
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
            // 368x364 is hard-coded to match the sample file; the real dimensions
            // could be parsed out of the SPS (Exp-Golomb coded) instead.
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 368, 364);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            mediaCodec.configure(mediaFormat, this.surface, null, 0);
        } catch (IOException e) {
            Log.e(TAG, "init media_codec failure,core didn't support!");
            e.printStackTrace();
        }
    }

    /** Starts the codec and launches the decode loop on a worker thread. */
    public void play() {
        // Bug fix: if createDecoderByType threw in the constructor, mediaCodec is
        // null and the original code crashed with an NPE here.
        if (mediaCodec == null) {
            Log.e(TAG, "play() called but codec init failed");
            return;
        }
        mediaCodec.start();
        new Thread(this).start();
    }

    @Override
    public void run() {
        decode();
    }

    /** Feeds NAL units to the decoder and renders each decoded frame to the surface. */
    private void decode() {
        try {
            byte[] bytes = getBytes(videoPath);
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            int currentFrameStart = 0; // offset of the NAL unit currently being fed
            int totalSize = bytes.length;
            while (currentFrameStart < totalSize) {
                // Find the start code of the NEXT unit. Bug fix: the original
                // treated a -1 ("not found") result as a valid offset, producing a
                // negative length for the final NAL unit; consume to EOF instead.
                int nextFrameStart = findSeperateStart(bytes, currentFrameStart + 2);
                if (nextFrameStart < 0) {
                    nextFrameStart = totalSize;
                }
                // Poll for a free input buffer (timeout is in microseconds).
                int availableIndex = mediaCodec.dequeueInputBuffer(10);
                if (availableIndex < 0) {
                    continue; // no free input buffer yet; retry
                }
                ByteBuffer inputBuffer = inputBuffers[availableIndex];
                inputBuffer.clear();
                int frameLength = nextFrameStart - currentFrameStart;
                inputBuffer.put(bytes, currentFrameStart, frameLength);
                mediaCodec.queueInputBuffer(availableIndex, 0, frameLength, 0, 0);
                currentFrameStart = nextFrameStart;

                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
                if (outIndex >= 0) {
                    // Crude ~30 fps pacing; a real player would use presentation
                    // timestamps instead of a fixed sleep.
                    Thread.sleep(33);
                    // render=true: the decoded buffer is rendered to the configured
                    // surface and then returned to the codec.
                    mediaCodec.releaseOutputBuffer(outIndex, true);
                } else {
                    // Negative index usually means "try again later", not a hard failure.
                    Log.e(TAG, "no decoded output available yet");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the offset of the next 00 00 00 01 start code at or after
     * {@code start}, or -1 if none remains.
     */
    private int findSeperateStart(byte[] bytes, int start) {
        int totalSize = bytes.length;
        // Bug fix: "< totalSize - 4" skipped a start code sitting exactly at the
        // last 4 bytes; "<=" covers it.
        for (int i = start; i <= totalSize - 4; i++) {
            if (bytes[i] == 0x00 && bytes[i + 1] == 0x00 && bytes[i + 2] == 0x00 && bytes[i + 3] == 0x01) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Reads the whole file into a byte array. Not suitable for large files —
     * those should be streamed and parsed incrementally.
     * Bug fix: the original never closed the stream; try-with-resources does.
     */
    private byte[] getBytes(String path) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] buf = new byte[1024];
        try (InputStream is = new FileInputStream(new File(path))) {
            int len;
            while ((len = is.read(buf, 0, buf.length)) != -1) {
                bos.write(buf, 0, len);
            }
        }
        return bos.toByteArray();
    }
}
读取一个H264文件其中的一帧为图片
/**
 * Decodes the first renderable frame of a raw H.264 stream with MediaCodec
 * (no surface) and writes it to external storage as a JPEG.
 */
public class H264PlayerImage implements Runnable {
    private static final String TAG = "H264_PLAYER";
    // Width/height the codec is configured with; must match the stream.
    private static final int WIDTH = 368;
    private static final int HEIGHT = 364;
    private String videoPath;
    private MediaCodec mediaCodec;
    private Context context;

    public H264PlayerImage(Context context, String videoPath) {
        this.videoPath = videoPath;
        this.context = context;
        try {
            // The MIME type selects the decoder; "video/avc" is H.264.
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
            // Dimensions are hard-coded for the sample file; they could be parsed
            // from the SPS (Exp-Golomb coded) instead.
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", WIDTH, HEIGHT);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            // No surface: decoded frames stay in output buffers for us to read.
            mediaCodec.configure(mediaFormat, null, null, 0);
        } catch (IOException e) {
            Log.e(TAG, "init media_codec failure,core didn't support!");
            e.printStackTrace();
        }
    }

    /** Starts the codec and launches the decode loop on a worker thread. */
    public void play() {
        // Guard against a failed codec init (see constructor catch block).
        if (mediaCodec == null) {
            Log.e(TAG, "play() called but codec init failed");
            return;
        }
        mediaCodec.start();
        new Thread(this).start();
    }

    @Override
    public void run() {
        decode();
    }

    /** Feeds NAL units until one frame decodes, saves it as JPEG, then stops. */
    private void decode() {
        try {
            byte[] bytes = getBytes(videoPath);
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            int currentFrameStart = 0; // offset of the NAL unit currently being fed
            int totalSize = bytes.length;
            while (currentFrameStart < totalSize) {
                // Bug fix: -1 ("no further start code") used to flow into the
                // length arithmetic below; treat it as end-of-file instead.
                int nextFrameStart = findSeperateStart(bytes, currentFrameStart + 2);
                if (nextFrameStart < 0) {
                    nextFrameStart = totalSize;
                }
                // Poll for a free input buffer (timeout in microseconds).
                int availableIndex = mediaCodec.dequeueInputBuffer(10);
                if (availableIndex < 0) {
                    continue;
                }
                ByteBuffer inputBuffer = inputBuffers[availableIndex];
                inputBuffer.clear();
                int frameLength = nextFrameStart - currentFrameStart;
                inputBuffer.put(bytes, currentFrameStart, frameLength);
                mediaCodec.queueInputBuffer(availableIndex, 0, frameLength, 0, 0);
                currentFrameStart = nextFrameStart;

                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
                if (outIndex < 0) {
                    // Negative index usually means "try again later", not failure.
                    Log.e(TAG, "no decoded output available yet");
                    continue;
                }
                ByteBuffer outputBuffer;
                if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                    outputBuffer = mediaCodec.getOutputBuffer(outIndex);
                } else {
                    // Bug fix: on pre-21 devices the original left outputBuffer
                    // null and dereferenced it; use the legacy array API.
                    outputBuffer = mediaCodec.getOutputBuffers()[outIndex];
                }
                boolean saved = false;
                if (outputBuffer != null) {
                    outputBuffer.position(info.offset);
                    outputBuffer.limit(info.offset + info.size);
                    byte[] frame = new byte[outputBuffer.remaining()];
                    outputBuffer.get(frame);
                    saved = saveFrameAsJpeg(frame);
                }
                // Bug fix: the original broke out of the loop WITHOUT releasing
                // the output buffer — exactly the buffer-starvation problem this
                // article warns about. Always release, then stop if done.
                mediaCodec.releaseOutputBuffer(outIndex, false);
                if (saved) {
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Converts one raw decoder output frame to a JPEG file on external storage.
     * Returns true when the file was written.
     * NOTE(review): assumes the decoder emits NV21 — many decoders emit other
     * YUV layouts; confirm against the codec's actual output COLOR_FORMAT.
     */
    private boolean saveFrameAsJpeg(byte[] frame) throws IOException {
        // Bug fix: the original used 368x384 here, disagreeing with the 368x364
        // the codec was configured with; use the configured dimensions.
        YuvImage yuvImage = new YuvImage(frame, ImageFormat.NV21, WIDTH, HEIGHT, null);
        ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, WIDTH, HEIGHT), 100, jpegStream);
        byte[] jpegBytes = jpegStream.toByteArray();
        Bitmap bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
        if (bitmap == null) {
            return false;
        }
        // Bug fix: the bytes written are JPEG, so name the file .jpg (the
        // original misleadingly used a .png extension).
        File firstFrame = new File(Environment.getExternalStorageDirectory(), "first_frame.jpg");
        try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(firstFrame))) {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 80, bos);
            bos.flush();
        }
        return true;
    }

    /**
     * Returns the offset of the next 00 00 00 01 start code at or after
     * {@code start}, or -1 if none remains.
     */
    private int findSeperateStart(byte[] bytes, int start) {
        int totalSize = bytes.length;
        // Bug fix: "<=" so a start code in the final 4 bytes is not missed.
        for (int i = start; i <= totalSize - 4; i++) {
            if (bytes[i] == 0x00 && bytes[i + 1] == 0x00 && bytes[i + 2] == 0x00 && bytes[i + 3] == 0x01) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Reads the whole file into a byte array. Not suitable for large files —
     * those should be streamed and parsed incrementally.
     * Bug fix: the original never closed the stream; try-with-resources does.
     */
    private byte[] getBytes(String path) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] buf = new byte[1024];
        try (InputStream is = new FileInputStream(new File(path))) {
            int len;
            while ((len = is.read(buf, 0, buf.length)) != -1) {
                bos.write(buf, 0, len);
            }
        }
        return bos.toByteArray();
    }
}
到这里就差不多了,项目链接在下面。
或者你又想录屏生成H264码流
//在sd卡目录下生成codec.h264录屏文件
public class ScreenshotActivity extends AppCompatActivity {
    // Screen-capture entry points.
    private MediaProjectionManager mediaProjectionManager;
    private MediaProjection mediaProjection;
    // Hardware H.264 encoder, fed through an input surface by the virtual display.
    private MediaCodec mediaCodec;
    // Worker thread that drains encoded output into the file.
    private Thread thread;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_screenshort);
        mediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    }

    /** onClick handler: asks for storage permission, then for screen-capture consent. */
    public void start(View view) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && checkSelfPermission(
                Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(new String[]{
                    Manifest.permission.READ_EXTERNAL_STORAGE,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE
            }, 1);
        } else {
            Intent captureIntent = mediaProjectionManager.createScreenCaptureIntent();
            startActivityForResult(captureIntent, 100);
        }
    }

    /** onClick handler: stops the recording loop. */
    public void stop(View view) {
        // Bug fix: the original NPE'd if "stop" was tapped before recording started.
        if (thread != null) {
            thread.interrupt();
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Note: permission results are delivered via onRequestPermissionsResult,
        // never through onActivityResult — the original "requestCode == 1" branch
        // here was dead code and has been removed.
        if (requestCode == 100 && resultCode == Activity.RESULT_OK) {
            mediaProjection = mediaProjectionManager.getMediaProjection(resultCode, data);
            initMediaCodec();
        }
    }

    /** Configures the H.264 encoder and starts the drain thread. */
    private void initMediaCodec() {
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
                    540, 960);
            // Input comes from a surface, so the color format is COLOR_FormatSurface
            // rather than an explicit YUV layout.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 400_000);
            // One key frame every 2 seconds.
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            final Surface surface = mediaCodec.createInputSurface();
            thread = new Thread() {
                @Override
                public void run() {
                    mediaCodec.start();
                    // Bind the virtual display to the encoder's input surface so
                    // screen content is fed straight into the encoder.
                    mediaProjection.createVirtualDisplay("screen-catch",
                            540, 960, 1,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                            surface, null, null);
                    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                    while (!isInterrupted()) {
                        // Wait up to 100 ms for encoded output.
                        int index = mediaCodec.dequeueOutputBuffer(bufferInfo, 100000);
                        if (index >= 0) {
                            ByteBuffer buffer = mediaCodec.getOutputBuffer(index);
                            byte[] outData = new byte[bufferInfo.size];
                            buffer.get(outData);
                            writeBytes(outData);
                            // Return the buffer so the encoder is never starved.
                            mediaCodec.releaseOutputBuffer(index, false);
                        }
                    }
                    Log.e("oicq", "录屏结束!");
                }
            };
            thread.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Appends one encoded buffer to /sdcard/codec.h264.
     * Bug fix: the original also wrote a '\n' after every buffer, injecting stray
     * bytes into the H.264 elementary stream and corrupting the recording.
     */
    public void writeBytes(byte[] array) {
        // Second constructor argument true = append mode. Opening the file per
        // call is simple but inefficient; a kept-open stream would be better.
        try (FileOutputStream writer = new FileOutputStream(
                Environment.getExternalStorageDirectory() + "/codec.h264", true)) {
            writer.write(array);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Bug fix: the original leaked the capture session on exit.
        if (thread != null) {
            thread.interrupt();
        }
        if (mediaProjection != null) {
            mediaProjection.stop();
        }
        // NOTE(review): mediaCodec.stop()/release() should also happen once the
        // drain thread has exited; releasing it here could race with the drain loop.
    }
}