Using MediaCodec on Android

Getting information about the main display

The SurfaceComposerClient class provides an API for fetching information about the main display; the result is returned in a DisplayInfo struct. If you want to track screen-orientation changes, you can poll getDisplayInfo(). I haven't found a better way to watch for rotation; if anyone knows one, please let me know. A minimal polling sketch follows the snippet below.

sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
        ISurfaceComposer::eDisplayIdMain);
DisplayInfo mainDpyInfo;
status_t err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
if (err != NO_ERROR) {
    fprintf(stderr, "ERROR: unable to get display characteristics\n");
    return err;
}
printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
        mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
        mainDpyInfo.orientation);
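
Since there is no native rotation callback here, this is a minimal polling sketch; the running flag and the sleep interval are illustrative assumptions, and runEncoder() further below performs the same check once per dequeued buffer instead:

uint8_t lastOrientation = mainDpyInfo.orientation;
while (running) {   // 'running' is a hypothetical stop flag
    if (SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo) == NO_ERROR
            && mainDpyInfo.orientation != lastOrientation) {
        // The display rotated; re-apply the projection (see runEncoder() below).
        lastOrientation = mainDpyInfo.orientation;
    }
    usleep(250 * 1000);  // poll a few times per second
}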

Initializing MediaCodec

Here we follow the screenrecord source to walk through the initialization flow for encoding with MediaCodec.

sp<MediaCodec> encoder;
sp<IGraphicBufferProducer> encoderInputSurface;
// encoder and encoderInputSurface are passed by address;
// prepareEncoder() fills both of them in before returning.
prepareEncoder(displayFps, &encoder, &encoderInputSurface);

prepareEncoder() performs the initialization, which breaks down into the following steps (a consolidated sketch follows the list):
2.1 Set the parameters the encoder needs: width, height, mime, color-format, bitrate, frame-rate, and i-frame-interval, all packed into an AMessage;
2.2 Create an ALooper instance and call its start() function;
2.3 Call MediaCodec's static factory function:

static sp<MediaCodec> CreateByType(
        const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL,
        pid_t pid = kNoPid);

to create a MediaCodec instance, where looper is the ALooper created in step 2.2, mime is "video/avc", and the encoder parameter is true;
2.4 Call MediaCodec's

status_t configure(
        const sp<AMessage> &format,
        const sp<Surface> &nativeWindow,
        const sp<ICrypto> &crypto,
        uint32_t flags);

to configure it, where format is the AMessage built in step 2.1, nativeWindow and crypto are both NULL, and flags is MediaCodec::CONFIGURE_FLAG_ENCODE;
2.5 Call MediaCodec's

status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);

to create the encoder's input surface; the resulting sp<IGraphicBufferProducer> is returned to the caller through the bufferProducer out-parameter;

2.6 Call MediaCodec's status_t start() function to start the codec.
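
Putting steps 2.1–2.6 together, here is a condensed sketch of prepareEncoder(), following the screenrecord source with the error handling trimmed; kMimeTypeAvc is "video/avc", and gVideoWidth/gVideoHeight/gBitRate are screenrecord's globals:

static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    // Step 2.1: pack the encoder parameters into an AMessage.
    sp<AMessage> format = new AMessage;
    format->setInt32("width", gVideoWidth);
    format->setInt32("height", gVideoHeight);
    format->setString("mime", kMimeTypeAvc);        // "video/avc"
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", gBitRate);
    format->setFloat("frame-rate", displayFps);
    format->setInt32("i-frame-interval", 10);

    // Step 2.2: create and start an ALooper for the codec.
    sp<ALooper> looper = new ALooper;
    looper->setName("screenrecord_looper");
    looper->start();

    // Step 2.3: create the encoder.
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);

    // Step 2.4: configure it as an encoder with the format from step 2.1.
    status_t err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) return err;

    // Step 2.5: obtain the input surface's buffer producer.
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) return err;

    // Step 2.6: start the codec.
    err = codec->start();
    if (err != NO_ERROR) return err;

    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return NO_ERROR;
}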

Initializing the Virtual Display

The virtual display is initialized by prepareVirtualDisplay().

// Configure virtual display.
sp<IBinder> dpy;
err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
if (err != NO_ERROR) {
    if (encoder != NULL) {
        encoder->release();
    }
    return err;
}

prepareVirtualDisplay() likewise breaks down into the following steps:

3.1 Create the display handle:

sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
        String8("ScreenRecorder"), false /*secure*/);

3.2 Set the projection parameters inside a "transaction"; see the code:

static uint32_t gVideoWidth = 0;
static uint32_t gVideoHeight = 0;

if (gVideoWidth == 0) {
    gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
}
if (gVideoHeight == 0) {
    gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
}

bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
uint32_t sourceWidth, sourceHeight;
if (!deviceRotated) {
    sourceWidth = mainDpyInfo.w;
    sourceHeight = mainDpyInfo.h;
} else {
    sourceHeight = mainDpyInfo.w;
    sourceWidth = mainDpyInfo.h;
}

uint32_t outWidth, outHeight;
// Compute outWidth and outHeight: scale the source into the fixed video
// size while preserving the display's aspect ratio.
float displayAspect = (float) sourceHeight / (float) sourceWidth;
if (gVideoHeight > (uint32_t)(gVideoWidth * displayAspect)) {
    // Video is taller than the scaled source: fill the width and
    // letterbox (pad top/bottom).
    outWidth = gVideoWidth;
    outHeight = (uint32_t)(gVideoWidth * displayAspect);
} else {
    // Video is wider than the scaled source: fill the height and
    // pillarbox (pad left/right).
    outHeight = gVideoHeight;
    outWidth = (uint32_t)(gVideoHeight / displayAspect);
}
uint32_t offX, offY;
offX = (gVideoWidth - outWidth) / 2;
offY = (gVideoHeight - outHeight) / 2;

Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
Rect layerStackRect(sourceWidth, sourceHeight);

SurfaceComposerClient::openGlobalTransaction();
SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
SurfaceComposerClient::setDisplayProjection(dpy,
        DISPLAY_ORIENTATION_0, layerStackRect, displayRect);
SurfaceComposerClient::setDisplayLayerStack(dpy, 0);
SurfaceComposerClient::closeGlobalTransaction();

Once the function returns, the caller holds the virtual display's IBinder handle in dpy. As a worked example of the math above: recording a 1080×1920 portrait display into a 1280×720 video gives displayAspect = 1920/1080 ≈ 1.78; since 720 < 1280 × 1.78, the else branch runs, so outHeight = 720, outWidth ≈ 720/1.78 ≈ 405, offX = (1280 − 405)/2 = 437, and offY = 0: the portrait content is pillarboxed in the landscape video.
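
The isDeviceRotated() helper used in the code above is not listed in this article; in the screenrecord source it is simply a check against the two upright orientations:

static bool isDeviceRotated(int orientation) {
    return orientation != DISPLAY_ORIENTATION_0 &&
            orientation != DISPLAY_ORIENTATION_180;
}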

Creating the MediaMuxer instance

sp<MediaMuxer> muxer = NULL;
// fd is the file descriptor of the output file.
muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
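
No track is added at this point: the H.264 codec-specific data (SPS/PPS) only becomes available once the encoder reports INFO_FORMAT_CHANGED. Condensed from runEncoder() below, the muxer's lifecycle looks like this:

sp<AMessage> newFormat;
encoder->getOutputFormat(&newFormat);           // valid after INFO_FORMAT_CHANGED
ssize_t trackIdx = muxer->addTrack(newFormat);  // register the single video track
muxer->start();
// ... for every encoded output buffer:
//     muxer->writeSampleData(buffers[bufIndex], trackIdx, ptsUsec, flags);
// ... and when recording ends:
muxer->stop();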

Starting the encoding work

The encoding loop's main entry point:

static status_t runEncoder(const sp<MediaCodec>& encoder,
    const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
    const sp<IBinder>& virtualDpy, uint8_t orientation);
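
A hedged usage sketch (this matches how screenrecord invokes it after the setup above; dpy is the virtual display handle from prepareVirtualDisplay(), and rawFp is non-NULL only when dumping a raw H.264 stream instead of muxing):

err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy, mainDpyInfo.orientation);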

The implementation:

static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // This is set by the signal handler.
    gStopRequested = false;

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            break;
        }

        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                // BUFFER_FLAG_CODECCONFIG marks codec-specific data (the
                // H.264 SPS/PPS). The muxer already receives it through the
                // track format, so drop it here to avoid writing it twice.
                if (muxer != NULL) {
                    size = 0;
                }
            }
            if (size != 0) {
                { // scope
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        // Re-apply the display projection in a new transaction.
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}
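
runEncoder() calls a setDisplayProjection() helper that has not been listed here. Below is a minimal sketch that reuses the same fit-and-center math as step 3.2; the wrapper signature is an assumption, and note that it must run between openGlobalTransaction() and closeGlobalTransaction(), which is exactly how runEncoder() calls it:

static void setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {
    // Swap the source dimensions if the device is rotated.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth  = deviceRotated ? mainDpyInfo.h : mainDpyInfo.w;
    uint32_t sourceHeight = deviceRotated ? mainDpyInfo.w : mainDpyInfo.h;

    // Fit the source into the fixed video size, preserving aspect ratio
    // (same letterbox/pillarbox logic as step 3.2).
    float displayAspect = (float) sourceHeight / (float) sourceWidth;
    uint32_t outWidth, outHeight;
    if (gVideoHeight > (uint32_t)(gVideoWidth * displayAspect)) {
        outWidth = gVideoWidth;
        outHeight = (uint32_t)(gVideoWidth * displayAspect);
    } else {
        outHeight = gVideoHeight;
        outWidth = (uint32_t)(gVideoHeight / displayAspect);
    }
    uint32_t offX = (gVideoWidth - outWidth) / 2;
    uint32_t offY = (gVideoHeight - outHeight) / 2;

    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
    Rect layerStackRect(sourceWidth, sourceHeight);
    SurfaceComposerClient::setDisplayProjection(dpy,
            DISPLAY_ORIENTATION_0, layerStackRect, displayRect);
}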

Releasing resources

When encoding finishes, don't forget to release the resources.

// Drop the reference to the input surface.
encoderInputSurface = NULL;

// Destroy the virtual display.
SurfaceComposerClient::destroyDisplay(dpy);

if (overlay != NULL) overlay->stop();
// Stop the encoder.
if (encoder != NULL) encoder->stop();
if (muxer != NULL) {
    muxer->stop();
} else if (rawFp != stdout) {
    fclose(rawFp);
}
if (encoder != NULL) encoder->release();

That's all.
