主屏幕信息的获取
SurfaceComposerClient类提供了获取主屏幕信息的方法,获取结果保存在DisplayInfo结构体中。如果要监听屏幕方向变化,目前只能通过轮询getDisplayInfo()函数来实现;尚未找到更好的监听屏幕转向的方法,如有更好的方案欢迎告知。
// Obtain a handle to the built-in main display from SurfaceFlinger.
sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
ISurfaceComposer::eDisplayIdMain);
// Query the main display's characteristics (size, refresh rate, orientation).
DisplayInfo mainDpyInfo;
status_t err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
if (err != NO_ERROR) {
fprintf(stderr, "ERROR: unable to get display characteristics\n");
return err;
}
// NOTE(review): w/h appear to be unsigned ("%u" would match better than
// "%d") -- confirm against the DisplayInfo field types.
printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
mainDpyInfo.orientation);
MediaCodec的初始化
这里借鉴screenrecord的源码,来分析一下使用MediaCodec编码时的初始化流程。
// Outputs of prepareEncoder(): the codec itself and the producer side of
// its input surface.
sp<MediaCodec> encoder;
sp<IGraphicBufferProducer> encoderInputSurface;
// encoder and encoderInputSurface are passed by address; prepareEncoder()
// fills both in on success.
prepareEncoder(displayFps, &encoder, &encoderInputSurface);
prepareEncoder()函数完成了初始化工作,具体可细分为以下几步:
2.1 设置编码需要的参数,包括: width、height、mime、color-format、bitrate、frame-rate、i-frame-interval,所有的参数被封装进AMessage中;
2.2 创建Looper实例, 并调用start()函数启动;
2.3 调用MediaCodec的静态函数:
// Factory: creates a MediaCodec for the given MIME type; encoder=true
// requests an encoder component.
static sp<MediaCodec> CreateByType(
const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL,
pid_t pid = kNoPid);
创建一个MediaCodec实例, 其中looper为步骤2.2创建的Looper实例, mime为”video/avc”, encoder参数为true;
2.4 调用MediaCodec的
// Configures the codec before start(); for an encoder, flags must include
// MediaCodec::CONFIGURE_FLAG_ENCODE.
status_t configure(
const sp<AMessage> &format,
const sp<Surface> &nativeWindow,
const sp<ICrypto> &crypto,
uint32_t flags);
进行配置, 其中format参数为步骤2.1中封装的AMessage参数, nativeWindow和crypto均为NULL, flags为MediaCodec::CONFIGURE_FLAG_ENCODE;
2.5 调用MediaCodec的
// Creates the encoder's input surface; the producer end is returned to
// the caller through bufferProducer.
status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
函数创建一个sp&lt;IGraphicBufferProducer&gt;实例, 通过指针参数bufferProducer返回给调用者;
2.6 调用MediaCodec的status_t start()函数启动编码线程。
Virtual Display的初始化
VirtualDisplay的初始化通过prepareVirtualDisplay()完成。
// Configure virtual display.
sp<IBinder> dpy;
err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
if (err != NO_ERROR) {
// Creation failed: release the already-prepared encoder before bailing out.
if (encoder != NULL) {
encoder->release();
}
return err;
}
prepareVirtualDisplay()函数也包含以下几步:
3.1 创建display的句柄:
// Ask SurfaceFlinger for a new (non-secure) virtual display token.
sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
String8("ScreenRecorder"), false /*secure*/);
3.2 在”事务”中完成一些参数的设置,见代码:
// Requested video size; 0 means "derive from the display's native size".
static uint32_t gVideoWidth = 0;
static uint32_t gVideoHeight = 0;
// NOTE(review): `rotated` is not defined in this excerpt -- presumably it
// is derived from the display orientation by the caller; confirm against
// the full source.
if (gVideoWidth == 0) {
gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
}
if (gVideoHeight == 0) {
gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
}
// Swap source width/height when the device is in a rotated orientation.
bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
uint32_t sourceWidth, sourceHeight;
if (!deviceRotated) {
sourceWidth = mainDpyInfo.w;
sourceHeight = mainDpyInfo.h;
} else {
sourceHeight = mainDpyInfo.w;
sourceWidth = mainDpyInfo.h;
}
uint32_t outWidth, outHeight;
// Compute outWidth/outHeight, preserving the display's aspect ratio.
float displayAspect = (float) sourceHeight / (float) sourceWidth;
if (gVideoHeight > (uint32_t)(gVideoWidth * displayAspect)) {
// Video frame is taller than the scaled display: full width, reduced height.
outWidth = gVideoWidth ;
outHeight = (uint32_t)(gVideoWidth * displayAspect);
} else {
// Video frame is wider than the scaled display: full height, reduced width.
outHeight = gVideoHeight;
outWidth = (uint32_t)(gVideoHeight / displayAspect);
}
// Center the scaled image inside the video frame (letterbox/pillarbox).
uint32_t offX, offY;
offX = (gVideoWidth - outWidth) / 2;
offY = (gVideoHeight - outHeight) / 2;
Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
Rect layerStackRect(sourceWidth, sourceHeight);
// Apply surface, projection and layer stack to the virtual display inside
// a single SurfaceFlinger global transaction.
SurfaceComposerClient::openGlobalTransaction();
SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
SurfaceComposerClient::setDisplayProjection(dpy,
DISPLAY_ORIENTATION_0, layerStackRect, displayRect);
SurfaceComposerClient::setDisplayLayerStack(dpy, 0);
SurfaceComposerClient::closeGlobalTransaction();
这样函数执行完成后,调用者获得了dpy所指向的IBinder实例。
创建MediaMuxer实例
sp<MediaMuxer> muxer = NULL;
// fd is the descriptor of the output file; mux into an MP4 container.
muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
开始编码工作
编码的主要实现函数接口:
// Main encode loop: drains encoded buffers from `encoder` and writes them
// either to `muxer` (MP4 container) or to `rawFp` (raw stream).
static status_t runEncoder(const sp<MediaCodec>& encoder,
const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
const sp<IBinder>& virtualDpy, uint8_t orientation);
具体实现:
/*
 * Runs the encode loop until the time limit expires, a stop is requested
 * (gStopRequested, set by the signal handler), or the codec signals EOS.
 *
 * Exactly one of `muxer` (MP4 output) and `rawFp` (raw output) must be
 * non-NULL (asserted below). Whenever the main display's orientation
 * changes, the projection on `virtualDpy` is re-applied so recording
 * follows the rotation.
 *
 * Returns NO_ERROR on normal termination, or the first error seen while
 * dequeuing/writing buffers.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
const sp<IBinder>& virtualDpy, uint8_t orientation) {
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
uint32_t debugNumFrames = 0;
// Wall-clock bounds for the recording session.
int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
DisplayInfo mainDpyInfo;
// Exactly one output sink (muxer or raw file) must be configured.
assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
Vector<sp<ABuffer> > buffers;
err = encoder->getOutputBuffers(&buffers);
if (err != NO_ERROR) {
fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
return err;
}
// This is set by the signal handler.
gStopRequested = false;
// Run until we're signaled.
while (!gStopRequested) {
size_t bufIndex, offset, size;
int64_t ptsUsec;
uint32_t flags;
// Stop once the configured time limit has elapsed.
if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
break;
}
err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
&flags, kTimeout);
switch (err) {
case NO_ERROR:
// got a buffer
if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
// BUFFER_FLAG_CODECCONFIG marks codec initialization / codec-specific
// data; when muxing, drop it (size=0) -- the muxer gets CSD from the
// output format instead.
if (muxer != NULL) {
size = 0;
}
}
if (size != 0) {
{ // scope
// Check orientation, update if it has changed.
//
// Polling for changes is inefficient and wrong, but the
// useful stuff is hard to get at without a Dalvik VM.
err = SurfaceComposerClient::getDisplayInfo(mainDpy,
&mainDpyInfo);
if (err != NO_ERROR) {
ALOGW("getDisplayInfo(main) failed: %d", err);
} else if (orientation != mainDpyInfo.orientation) {
// Orientation changed: re-apply the display projection
// in a new global transaction.
SurfaceComposerClient::openGlobalTransaction();
setDisplayProjection(virtualDpy, mainDpyInfo);
SurfaceComposerClient::closeGlobalTransaction();
orientation = mainDpyInfo.orientation;
}
}
// If the virtual display isn't providing us with timestamps,
// use the current time. This isn't great -- we could get
// decoded data in clusters -- but we're not expecting
// to hit this anyway.
if (ptsUsec == 0) {
ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
}
if (muxer == NULL) {
fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
// Flush the data immediately in case we're streaming.
// We don't want to do this if all we've written is
// the SPS/PPS data because mplayer gets confused.
if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
fflush(rawFp);
}
} else {
// The MediaMuxer docs are unclear, but it appears that we
// need to pass either the full set of BufferInfo flags, or
// (flags & BUFFER_FLAG_SYNCFRAME).
//
// If this blocks for too long we could drop frames. We may
// want to queue these up and do them on a different thread.
ATRACE_NAME("write sample");
// trackIdx is set in the INFO_FORMAT_CHANGED case below,
// which must arrive before the first data buffer.
assert(trackIdx != -1);
err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
ptsUsec, flags);
if (err != NO_ERROR) {
fprintf(stderr,
"Failed writing data to muxer (err=%d)\n", err);
return err;
}
}
debugNumFrames++;
}
// Return the buffer to the codec in every case.
err = encoder->releaseOutputBuffer(bufIndex);
if (err != NO_ERROR) {
fprintf(stderr, "Unable to release output buffer (err=%d)\n",
err);
return err;
}
if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
// Not expecting EOS from SurfaceFlinger. Go with it.
ALOGI("Received end-of-stream");
gStopRequested = true;
}
break;
case -EAGAIN: // INFO_TRY_AGAIN_LATER
// No output available within kTimeout; loop and re-check stop/time.
ALOGV("Got -EAGAIN, looping");
break;
case INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED
{
// Format includes CSD, which we must provide to muxer.
ALOGV("Encoder format changed");
sp<AMessage> newFormat;
encoder->getOutputFormat(&newFormat);
if (muxer != NULL) {
trackIdx = muxer->addTrack(newFormat);
ALOGV("Starting muxer");
err = muxer->start();
if (err != NO_ERROR) {
fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
return err;
}
}
}
break;
case INFO_OUTPUT_BUFFERS_CHANGED: // INFO_OUTPUT_BUFFERS_CHANGED
// Not expected for an encoder; handle it anyway.
ALOGV("Encoder buffers changed");
err = encoder->getOutputBuffers(&buffers);
if (err != NO_ERROR) {
fprintf(stderr,
"Unable to get new output buffers (err=%d)\n", err);
return err;
}
break;
case INVALID_OPERATION:
ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
return err;
default:
fprintf(stderr,
"Got weird result %d from dequeueOutputBuffer\n", err);
return err;
}
}
ALOGV("Encoder stopping (req=%d)", gStopRequested);
if (gVerbose) {
printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
debugNumFrames, nanoseconds_to_seconds(
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
}
return NO_ERROR;
}
资源的释放
编码结束后,不要忘了资源的释放。
// Drop our reference to the encoder's input surface.
encoderInputSurface = NULL;
// Destroy the virtual display.
SurfaceComposerClient::destroyDisplay(dpy);
if (overlay != NULL) overlay->stop();
// Stop the encoder.
if (encoder != NULL) encoder->stop();
if (muxer != NULL) {
// Finalize the MP4 file.
muxer->stop();
} else if (rawFp != stdout) {
fclose(rawFp);
}
if (encoder != NULL) encoder->release();
结束。