mediacodec_encode_sample.java

34 篇文章 1 订阅
18 篇文章 0 订阅


  // --- Encoding parameters ---
  // Width 328 is deliberately not a multiple of 16, which exercises the
  // TI-encoder width-rounding workaround further below — TODO confirm intent.
  int width = 328, height = 248;
  int bitRate = 1000000;
  int frameRate = 15;
  String mimeType = "video/avc";
  // threshold: per-pixel difference tolerance passed to checkFrame();
  // maxerror: maximum number of mismatching pixels accepted by the final assert.
  int threshold = 50;
  int maxerror = 50;
   
  // The encoder is created after codec selection; the decoder is created
  // lazily, once the encoder emits its codec-config (csd-0) buffer.
  MediaCodec encoder, decoder = null;
  ByteBuffer[] encoderInputBuffers;
  ByteBuffer[] encoderOutputBuffers;
  ByteBuffer[] decoderInputBuffers = null;
  ByteBuffer[] decoderOutputBuffers = null;
  // Scan every installed codec and keep the first one that (a) is an
  // encoder and (b) advertises support for the requested MIME type.
  int numCodecs = MediaCodecList.getCodecCount();
  MediaCodecInfo codecInfo = null;
  for (int i = 0; i < numCodecs && codecInfo == null; i++) {
  MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
  if (!info.isEncoder()) {
  continue;
  }
  String[] types = info.getSupportedTypes();
  boolean found = false;
  for (int j = 0; j < types.length && !found; j++) {
  if (types[j].equals(mimeType))
  found = true;
  }
  if (!found)
  continue;
  codecInfo = info;
  }
  // NOTE(review): codecInfo can still be null here if no AVC encoder is
  // installed, in which case getName() below throws NullPointerException.
  Log.d(TAG, "Found " + codecInfo.getName() + " supporting " + mimeType);
   
  // Pick the first YUV 4:2:0 color format (planar or semi-planar variant)
  // that the selected encoder supports. 0 means "none found yet" and also
  // acts as the loop-exit condition.
  int colorFormat = 0;
  MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
  for (int i = 0; i < capabilities.colorFormats.length && colorFormat == 0; i++) {
  int format = capabilities.colorFormats[i];
  switch (format) {
  case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
  case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
  case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
  case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
  case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
  colorFormat = format;
  break;
  default:
  Log.d(TAG, "Skipping unsupported color format " + format);
  break;
  }
  }
  assertTrue("no supported color format", colorFormat != 0);
  Log.d(TAG, "Using color format " + colorFormat);
   
  // --- Vendor-specific quirks ---
  if (codecInfo.getName().equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
  // This codec doesn't support a width not a multiple of 16,
  // so round down.
  width &= ~15;
  }
  // Nvidia encoders require the row stride and slice height to be padded
  // up to the next multiple of 16; other codecs take the frame size as-is.
  int stride = width;
  int sliceHeight = height;
  if (codecInfo.getName().startsWith("OMX.Nvidia.")) {
  stride = (stride + 15)/16*16;
  sliceHeight = (sliceHeight + 15)/16*16;
  }
  // Create the encoder by name (not by type) so it matches the codecInfo
  // whose capabilities and quirks were just handled.
  encoder = MediaCodec.createByCodecName(codecInfo.getName());
  MediaFormat inputFormat = MediaFormat.createVideoFormat(mimeType, width, height);
  inputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
  inputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
  inputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
  // NOTE(review): KEY_I_FRAME_INTERVAL is documented in seconds, so 75
  // is an unusually long GOP — confirm whether "75 frames" was intended.
  inputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 75);
  // "stride"/"slice-height" have no public KEY_ constants at this API
  // level, hence the raw string keys.
  inputFormat.setInteger("stride", stride);
  inputFormat.setInteger("slice-height", sliceHeight);
  Log.d(TAG, "Configuring encoder with input format " + inputFormat);
  encoder.configure(inputFormat, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
  encoder.start();
  encoderInputBuffers = encoder.getInputBuffers();
  encoderOutputBuffers = encoder.getOutputBuffers();
   
  // --- Build one synthetic YUV 4:2:0 test frame ---
  // Y is a diagonal gradient ((x+y) mod 256), Cb ramps left-to-right,
  // Cr ramps top-to-bottom. Chroma is subsampled 2x2.
  int chromaStride = stride/2;
  // Y plane (stride * sliceHeight) plus two quarter-resolution chroma planes.
  int frameSize = stride*sliceHeight + 2*chromaStride*sliceHeight/2;
  byte[] inputFrame = new byte[frameSize];
  if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar ||
  colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar) {
  // Planar layout: full Y plane, then the whole Cb plane, then the whole Cr plane.
  for (int y = 0; y < height; y++) {
  for (int x = 0; x < width; x++) {
  int Y = (x + y) & 255;
  int Cb = 255*x/width;
  int Cr = 255*y/height;
  inputFrame[y*stride + x] = (byte) Y;
  inputFrame[stride*sliceHeight + (y/2)*chromaStride + (x/2)] = (byte) Cb;
  inputFrame[stride*sliceHeight + chromaStride*(sliceHeight/2) + (y/2)*chromaStride + (x/2)] = (byte) Cr;
  }
  }
  } else {
  // Semi-planar layout: full Y plane followed by interleaved Cb/Cr pairs.
  for (int y = 0; y < height; y++) {
  for (int x = 0; x < width; x++) {
  int Y = (x + y) & 255;
  int Cb = 255*x/width;
  int Cr = 255*y/height;
  inputFrame[y*stride + x] = (byte) Y;
  inputFrame[stride*sliceHeight + 2*(y/2)*chromaStride + 2*(x/2)] = (byte) Cb;
  inputFrame[stride*sliceHeight + 2*(y/2)*chromaStride + 2*(x/2) + 1] = (byte) Cr;
  }
  }
  }
   
  // start encoding + decoding
  //
  // Main drain loop: feed 10 copies of the test frame into the encoder,
  // pipe every encoded buffer straight into a decoder (created on the fly
  // from the encoder's codec-config output), and check the first decoded
  // frame against the tolerance. The loop exits either on decoder EOS or
  // as soon as checkFrame() has produced a result (errors >= 0).
  final long kTimeOutUs = 5000;
  // NOTE: this one BufferInfo is reused for BOTH the encoder and the
  // decoder dequeue calls below; order of use matters.
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  boolean sawInputEOS = false;
  boolean sawOutputEOS = false;
  MediaFormat oformat = null;
  int errors = -1;
  int numInputFrames = 0;
  while (!sawOutputEOS && errors < 0) {
  // 1) Feed the encoder (until 10 frames have been submitted, then EOS).
  if (!sawInputEOS) {
  int inputBufIndex = encoder.dequeueInputBuffer(kTimeOutUs);
   
  if (inputBufIndex >= 0) {
  ByteBuffer dstBuf = encoderInputBuffers[inputBufIndex];
   
  int sampleSize = frameSize;
  long presentationTimeUs = 0;
   
  if (numInputFrames >= 10) {
  // Signal end-of-stream with an empty buffer.
  Log.d(TAG, "saw input EOS.");
  sawInputEOS = true;
  sampleSize = 0;
  } else {
  dstBuf.clear();
  dstBuf.put(inputFrame);
  // Timestamp derived from the frame index at the configured frame rate.
  presentationTimeUs = numInputFrames*1000000/frameRate;
  numInputFrames++;
  }
   
  encoder.queueInputBuffer(
  inputBufIndex,
  0 /* offset */,
  sampleSize,
  presentationTimeUs,
  sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
  }
  }
   
  // 2) Drain the encoder.
  int res = encoder.dequeueOutputBuffer(info, kTimeOutUs);
   
  if (res >= 0) {
  int outputBufIndex = res;
  ByteBuffer buf = encoderOutputBuffers[outputBufIndex];
   
  buf.position(info.offset);
  buf.limit(info.offset + info.size);
   
  if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
   
  // First output is the codec-config (SPS/PPS) buffer: use it as
  // csd-0 to create and start the companion decoder.
  // NOTE(review): buf is released right after this branch; this
  // assumes configure() copies the csd data — confirm.
  decoder = MediaCodec.createDecoderByType(mimeType);
  MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
  format.setByteBuffer("csd-0", buf);
  decoder.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
  decoder.start();
  decoderInputBuffers = decoder.getInputBuffers();
  decoderOutputBuffers = decoder.getOutputBuffers();
  } else {
  // Regular encoded frame: forward it to the decoder. The blocking
  // dequeueInputBuffer(-1) is safe here because the decoder has
  // already been started by the codec-config branch above.
  int decIndex = decoder.dequeueInputBuffer(-1);
  decoderInputBuffers[decIndex].clear();
  decoderInputBuffers[decIndex].put(buf);
  decoder.queueInputBuffer(decIndex, 0, info.size, info.presentationTimeUs, info.flags);
  }
   
  encoder.releaseOutputBuffer(outputBufIndex, false /* render */);
  } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
  encoderOutputBuffers = encoder.getOutputBuffers();
   
  Log.d(TAG, "encoder output buffers have changed.");
  } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
  MediaFormat encformat = encoder.getOutputFormat();
   
  Log.d(TAG, "encoder output format has changed to " + encformat);
  }
   
  // 3) Drain the decoder (skip until it has been created).
  if (decoder == null)
  res = MediaCodec.INFO_TRY_AGAIN_LATER;
  else
  res = decoder.dequeueOutputBuffer(info, kTimeOutUs);
   
  if (res >= 0) {
  int outputBufIndex = res;
  ByteBuffer buf = decoderOutputBuffers[outputBufIndex];
   
  buf.position(info.offset);
  buf.limit(info.offset + info.size);
   
  if (info.size > 0) {
  // Compare the decoded frame against the synthetic input; a
  // non-negative result also terminates the outer while loop,
  // so only the first decoded frame is ever checked.
  errors = checkFrame(buf, info, oformat, width, height, threshold);
  }
   
  decoder.releaseOutputBuffer(outputBufIndex, false /* render */);
   
  if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
  Log.d(TAG, "saw output EOS.");
  sawOutputEOS = true;
  }
  } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
  decoderOutputBuffers = decoder.getOutputBuffers();
   
  Log.d(TAG, "decoder output buffers have changed.");
  } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
  // The decoder's output format (needed by checkFrame for the
  // actual decoded color layout) arrives asynchronously here.
  oformat = decoder.getOutputFormat();
   
  Log.d(TAG, "decoder output format has changed to " + oformat);
  }
   
  }
   
  encoder.stop();
  encoder.release();
  // NOTE(review): decoder stays null if the encoder never emitted a
  // codec-config buffer; these two calls would then throw NPE.
  decoder.stop();
  decoder.release();
   
  // errors >= 0 proves checkFrame() ran on at least one decoded frame;
  // the mismatch count must not exceed the configured maximum.
  assertTrue("no frame decoded", errors >= 0);
  assertTrue("decoding error too big: " + errors + "/" + maxerror, errors <= maxerror);
   

以下是一个简单的 Android MediaCodec 录制音频的完整代码示例: ```java import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaCodec; import android.media.MediaCodecInfo; import android.media.MediaFormat; import android.media.MediaMuxer; import android.os.Build; import android.os.Environment; import android.util.Log; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; public class AudioRecorder { private static final String TAG = "AudioRecorder"; private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm"; private static final int AUDIO_SAMPLE_RATE = 44100; /* 44.1kHz */ private static final int AUDIO_CHANNEL_COUNT = 1; /* mono */ private static final int AUDIO_BIT_RATE = 64000; private static final int BUFFER_SIZE = 2 * AudioRecord.getMinBufferSize(AUDIO_SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); private MediaCodec mEncoder; private MediaMuxer mMuxer; private int mTrackIndex = -1; private boolean mMuxerStarted = false; private long mStartTime = 0; public void startRecording() throws IOException { File outputFile = new File(Environment.getExternalStorageDirectory(), "audio_record.mp4"); mEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE); MediaFormat format = new MediaFormat(); format.setString(MediaFormat.KEY_MIME, AUDIO_MIME_TYPE); format.setInteger(MediaFormat.KEY_SAMPLE_RATE, AUDIO_SAMPLE_RATE); format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, AUDIO_CHANNEL_COUNT); format.setInteger(MediaFormat.KEY_BIT_RATE, AUDIO_BIT_RATE); format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, BUFFER_SIZE); } mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); mEncoder.start(); mMuxer = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers(); MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 
mTrackIndex = -1; mMuxerStarted = false; int sampleRate = AUDIO_SAMPLE_RATE; int channelCount = AUDIO_CHANNEL_COUNT; long presentationTimeUs = 0; while (true) { int inputBufferIndex = mEncoder.dequeueInputBuffer(-1); if (inputBufferIndex >= 0) { ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex); inputBuffer.clear(); int bytesRead = audioRecord.read(audioData, 0, audioData.length); /* read PCM audio data from the AudioRecord into audioData */ if (bytesRead == -1) { /* eof */ mEncoder.queueInputBuffer(inputBufferIndex, 0, 0, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM); break; } else { int inputBufferCapacity = inputBuffer.capacity(); int size = inputBufferCapacity < bytesRead ? inputBufferCapacity : bytesRead; inputBuffer.put(audioData, 0, size); presentationTimeUs = (long) (1_000_000L * bytesRead / (2 * channelCount * sampleRate)); mEncoder.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeUs, 0); } } int outputBufferIndex = mEncoder.dequeueOutputBuffer(bufferInfo, 0); if (outputBufferIndex >= 0) { ByteBuffer outputBuffer = encoderOutputBuffers[outputBufferIndex]; if (!mMuxerStarted) { MediaFormat newFormat = mEncoder.getOutputFormat(); mTrackIndex = mMuxer.addTrack(newFormat); mMuxer.start(); mStartTime = System.nanoTime(); mMuxerStarted = true; } if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { bufferInfo.size = 0; } if (bufferInfo.size != 0) { outputBuffer.position(bufferInfo.offset); outputBuffer.limit(bufferInfo.offset + bufferInfo.size); mMuxer.writeSampleData(mTrackIndex, outputBuffer, bufferInfo); } mEncoder.releaseOutputBuffer(outputBufferIndex, false); } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat newFormat = mEncoder.getOutputFormat(); mTrackIndex = mMuxer.addTrack(newFormat); mMuxer.start(); mStartTime = System.nanoTime(); mMuxerStarted = true; } } release(); } private void release() { if (mEncoder != null) { mEncoder.stop(); mEncoder.release(); mEncoder = null; } if (mMuxer != null) { if (mMuxerStarted) { mMuxer.stop(); } 
mMuxer.release(); mMuxer = null; } } } ``` 注意,这只是一个简单的示例,实际应用中还需要考虑错误处理、音频数据来源和如何停止录制等问题。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值