android native 编解码demo
1 创建解码器,设置解码输入格式
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaError.h>

const char * MIMETYPE_VIDEO_HEVC = "video/hevc";//h265
AMediaCodec *mCodec { nullptr };
// BUG FIX: the original snippet used mInputFormat below without ever
// declaring or creating it — it must be allocated with AMediaFormat_new().
AMediaFormat *mInputFormat { nullptr };

// Create the H.265/HEVC decoder.
mCodec = AMediaCodec_createDecoderByType(MIMETYPE_VIDEO_HEVC);
LOG_ALWAYS_FATAL_IF(mCodec == nullptr, "Failed to create decoder");

mInputFormat = AMediaFormat_new();
LOG_ALWAYS_FATAL_IF(mInputFormat == nullptr, "Failed to create media format");

// Select H.265/HEVC as the video MIME type.
AMediaFormat_setString(mInputFormat, AMEDIAFORMAT_KEY_MIME, MIMETYPE_VIDEO_HEVC); // H265
// AMEDIAFORMAT_KEY_LOW_LATENCY = 1 enables the codec's low-latency mode.
// It minimizes the codec's internal buffering/processing delay, giving the
// lowest possible input-to-output latency — important for real-time use
// cases such as video calls and live streaming.
// NOTE: requires hardware codec support for the low-latency feature.
AMediaFormat_setInt32(mInputFormat, AMEDIAFORMAT_KEY_LOW_LATENCY, 1);
2 input 数据处理 同步模式
2.1 第一步:取buffer index
/**
 * Get the index of the next available input buffer. An app will typically use this with
 * getInputBuffer() to get a pointer to the buffer, then copy the data to be encoded or decoded
 * into the buffer before passing it to the codec.
 *
 * timeoutUs: 0 returns immediately; a negative value waits indefinitely;
 * a positive value waits up to that many microseconds. Returns a negative
 * value (AMEDIACODEC_INFO_TRY_AGAIN_LATER) if no buffer became available.
 *
 * Available since API level 21.
 */
ssize_t AMediaCodec_dequeueInputBuffer(AMediaCodec*, int64_t timeoutUs) __INTRODUCED_IN(21);
2.2 第二步:通过buffer index 获取buffer数据
/**
 * Get an input buffer. The specified buffer index must have been previously obtained from
 * dequeueInputBuffer, and not yet queued.
 *
 * out_size receives the writable capacity of the returned buffer, in bytes.
 *
 * Available since API level 21.
 */
uint8_t* AMediaCodec_getInputBuffer(AMediaCodec*, size_t idx, size_t *out_size) __INTRODUCED_IN(21);
2.3 第三步:queue buffer (具体查看 graphic bufferqueue 轮转)
/**
 * Send the specified buffer to the codec for processing.
 *
 * offset/size delimit the valid data copied into the buffer; time is the
 * presentation timestamp in microseconds; flags carries buffer flags such as
 * AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM.
 *
 * Available since API level 21.
 */
media_status_t AMediaCodec_queueInputBuffer(AMediaCodec*, size_t idx,
_off_t_compat offset, size_t size,
uint64_t time, uint32_t flags) __INTRODUCED_IN(21);
3 output 数据处理 同步模式
3.1 - 第一步:取buffer index
/**
 * Get the index of the next available buffer of processed data.
 *
 * info is filled with the buffer's metadata (offset, size, presentationTimeUs,
 * flags). timeoutUs behaves like dequeueInputBuffer's; negative return values
 * also signal events such as an output format change.
 *
 * Available since API level 21.
 */
ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info,
int64_t timeoutUs) __INTRODUCED_IN(21);
3.2 - 第二步:通过buffer index 获取buffer数据
/**
 * Get an output buffer. The specified buffer index must have been previously obtained from
 * dequeueOutputBuffer, and not yet queued.
 *
 * out_size receives the capacity of the returned buffer, in bytes.
 *
 * Available since API level 21.
 */
uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec*, size_t idx, size_t *out_size) __INTRODUCED_IN(21);
3.3 - 第三步:buffer释放
/**
 * If you are done with a buffer, use this call to return the buffer to
 * the codec. If you previously specified a surface when configuring this
 * video decoder you can optionally render the buffer.
 *
 * render=true queues the buffer to the configured output surface for
 * display; render=false discards it and returns it to the codec.
 *
 * Available since API level 21.
 */
media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec*, size_t idx, bool render) __INTRODUCED_IN(21);
AMediaCodec_releaseOutputBuffer(idx, true /*render*/) 会触发 BufferQueueProducer::queueBuffer,进而触发消费者侧回调 onFrameAvailable(见下面的框架注释)。
// onFrameAvailable is called from queueBuffer each time an additional frame becomes available
// for consumption.
总结看来,只有第三步不同,AMediaCodec_queueInputBuffer是数据入队等待消费,AMediaCodec_releaseOutputBuffer是释放数据。
编码和解码过程,InputBuffer和OutputBuffer就互相置换下。
解码:原始数据(视频流)-> 提取器AMediaExtractor->InputBuffer->OutputBuffer->帧数据(YUV420sp,PCM)
编码:帧数据(YUV420sp,PCM)->InputBuffer->OutputBuffer->合成器AMediaMuxer
4 设置 frame listener, 重写 onFrameAvailable
class MyFrameListener : public ConsumerBase::FrameAvailableListener
{
sp<BufferItemConsumer> mConsumer;
public:
// 重写这个接口,可以处理解码后的graphicbuffer
virtual void onFrameAvailable(const BufferItem&/* item*/)
{
BufferItem buffer;
mConsumer->acquireBuffer(&buffer, 0);
}
};
// --- Wire up an on-screen surface plus a private BufferQueue whose consumer
// --- side delivers decoded frames to MyFrameListener::onFrameAvailable.
AMediaFormat *mFormat ;                  // output media format handle (unused in this snippet)
ssize_t bufidx;                          // dequeued codec buffer index (unused in this snippet)
sp<SurfaceComposerClient> client;        // connection to SurfaceFlinger
sp<SurfaceControl> surfaceControl;       // on-screen layer handle
sp<Surface> surface_out;                 // Surface backing surfaceControl (display path)
sp<Surface> surface;                     // Surface wrapping our own BufferQueue's producer end
sp<IGraphicBufferProducer> gbProducer;   // producer end of the private BufferQueue
sp<IGraphicBufferConsumer> gbConsumer;   // consumer end of the private BufferQueue
sp<BufferItemConsumer> bufferConsumer;   // wraps gbConsumer; hands out BufferItems
sp<MyFrameListener> listener;            // frame-available callback target
client = new SurfaceComposerClient();
// Create an on-screen RGBA8888 surface.
// NOTE(review): w/h and m_format_width/m_format_height are assumed to be
// defined elsewhere in the demo — confirm before copying this snippet.
surfaceControl = client->createSurface(String8("resize"), w, h, PIXEL_FORMAT_RGBA_8888, 0);
surface_out = surfaceControl->getSurface();
// Size, position and show the layer at a very high z-order (0x40000000).
SurfaceComposerClient::Transaction{}
.setSize(surfaceControl, m_format_width, m_format_height)
.setLayer(surfaceControl, 0x40000000)
.setPosition(surfaceControl, 0, 0)
.show(surfaceControl)
.apply();
// Build a standalone BufferQueue so the app (not SurfaceFlinger) consumes
// the decoder's output buffers.
BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
bufferConsumer = new BufferItemConsumer(gbConsumer, GRALLOC_USAGE_HW_TEXTURE|GRALLOC_USAGE_HW_COMPOSER, 12, false);
// NOTE(review): requires MyFrameListener to have a constructor taking
// sp<BufferItemConsumer> — verify the class definition.
listener = new MyFrameListener(bufferConsumer);
// listener->start_loop();
bufferConsumer->setName(String8("codec-test"));
bufferConsumer->setFrameAvailableListener(listener);   // hook onFrameAvailable
bufferConsumer->setDefaultBufferSize(w, h);
bufferConsumer->setDefaultBufferFormat(PIXEL_FORMAT_RGBA_8888);
bufferConsumer->setMaxAcquiredBufferCount(4);          // app may hold up to 4 acquired frames
// Producer-side Surface — presumably passed to AMediaCodec_configure() so the
// decoder renders into this BufferQueue; TODO confirm against the caller.
surface = new Surface(gbProducer);
5 编解码demo
1同步模式demo参考
/frameworks/av/media/tests/benchmark/src/native/encoder/Encoder.cpp
/frameworks/av/media/tests/benchmark/src/native/decoder/Decoder.cpp
2 异步模式demo参考
2.1 c++
cts/tests/media/jni/NativeCodecTestBase.h
/cts/tests/media/jni/NativeCodecTestBase.cpp
2.2 java
/cts/tests/media/src/android/mediav2/cts/CodecTestBase.java