功能:沿用 Android cmds 下的 screenrecord 代码,整理成一个可供 native 程序调用、能实时获取屏幕 H.264 码流的库,分享一下!
头文件 screenrecord.h:
#include <stddef.h>  // size_t
// Callback interface: implement this to receive the live H.264 stream.
class IScreenRecordCallback
{
public:
virtual ~IScreenRecordCallback(){}
// Invoked from the encoder drain loop for every encoded output buffer
// (including the codec-config SPS/PPS buffer, per runEncoder below).
// pData points into the encoder's buffer and is only valid during the call;
// copy it if you need to keep it.
virtual void onData(void* pData, size_t size) = 0;
//virtual void onCodecConfig(void* pData, size_t size) = 0;
};
class ScreenRecordImp;
// Public facade: owns a ScreenRecordImp (pimpl) and forwards start/stop.
class ScreenRecord
{
public:
ScreenRecord();
~ScreenRecord();
// Blocks inside the capture loop until stop() is called or an error occurs.
// Returns 0 on success, otherwise an Android status_t-style error code.
int start(IScreenRecordCallback* callback);
// Requests the capture loop to exit. NOTE(review): signaling uses a plain
// bool flag in the implementation -- confirm cross-thread visibility.
void stop();
private:
ScreenRecordImp* m_pImp; // owned; created in ctor, deleted in dtor
};
实现文件 screenrecord.cpp:
#include "screenrecord.h"
// C / C++ standard headers
#include <ctype.h>
#include <fcntl.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/wait.h>
#include <iostream>
// Android framework headers. NOTE(review): the original post lost the
// <...> header names to HTML extraction; restored from the AOSP
// screenrecord sources this file derives from -- verify against your tree.
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <gui/ISurfaceComposer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <media/ICrypto.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <ui/DisplayInfo.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/Timers.h>
using namespace android;
using namespace std;
// NOTE(review): LOG_TAG is conventionally defined *before* <utils/Log.h>
// is included -- verify the include order in the real file.
#define LOG_TAG "LibScreenRecord"
//#define LOG_NDEBUG 0
// Fallback encode size used when the encoder rejects the native display size.
static const uint32_t kFallbackWidth = 1280; // 720p
static const uint32_t kFallbackHeight = 720;
// Implementation: drives a MediaCodec H.264 encoder fed by a SurfaceFlinger
// virtual display, delivering each encoded buffer to the caller's callback.
// (Template arguments below were stripped by the blog's HTML extraction and
// are restored from the AOSP screenrecord sources.)
class ScreenRecordImp
{
public:
    ScreenRecordImp();
    // Runs the whole capture pipeline; blocks until stop() or an error.
    // Returns 0 on success, otherwise an Android status_t error code.
    int start(IScreenRecordCallback* callback);
    // Asks the capture loop to exit; polled once per dequeue timeout.
    void stop();
private:
    // True when the display sits at 90/270 degrees.
    bool isDeviceRotated(int orientation);
    // Creates, configures and starts the encoder; returns its input surface.
    status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
            sp<IGraphicBufferProducer>* pBufferProducer);
    // Creates a virtual display mirroring the main display into the
    // encoder's input surface.
    status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
            const sp<IGraphicBufferProducer>& bufferProducer,
            sp<IBinder>* pDisplayHandle);
    // (Re)applies the layer-stack -> output mapping, e.g. after rotation.
    status_t setDisplayProjection(const sp<IBinder>& dpy,
            const DisplayInfo& mainDpyInfo);
    // Main drain loop: dequeues encoder output and invokes the callback.
    status_t runEncoder(const sp<MediaCodec>& encoder,
            IScreenRecordCallback* callback, sp<IBinder>& mainDpy,
            sp<IBinder>& dpy, uint8_t orientation);
private:
    bool mRotate;                 // rotate 90 degrees
    uint32_t mVideoWidth;         // 0 = use display size (resolved in start())
    uint32_t mVideoHeight;
    uint32_t mBitRate;            // bits per second
    volatile bool mStopRequested; // written by stop(), read by the loop
};
// Allocates the pimpl; cheap, no system resources are acquired yet.
ScreenRecord::ScreenRecord()
{
m_pImp = new ScreenRecordImp;
}
// Requests any in-progress capture to stop before releasing the pimpl.
// NOTE(review): stop() only sets a flag; if start() is blocked in another
// thread, it may still be inside m_pImp when it is deleted -- verify the
// caller joins/waits before destroying this object.
ScreenRecord::~ScreenRecord()
{
stop();
delete m_pImp;
m_pImp = NULL;
}
// Blocks inside the capture loop until stop() is called or an error occurs.
// Returns 0 on success, otherwise an Android status_t error code.
int ScreenRecord::start(IScreenRecordCallback* callback)
{
return m_pImp->start(callback);
}
// Signals the capture loop (typically running inside another thread's
// start() call) to exit; shutdown is not instantaneous.
void ScreenRecord::stop()
{
m_pImp->stop();
}
// Defaults: no 90-degree rotation, size taken from the display
// (0 means "resolve in start()"), 4 Mbps H.264, loop initially stopped.
ScreenRecordImp::ScreenRecordImp():mRotate(false),
mVideoWidth(0),
mVideoHeight(0),
mBitRate(4000000), // 4 Mbps
mStopRequested(true)
{
}
// A display counts as "rotated" when it is not in one of the two
// landscape-neutral orientations (0 or 180 degrees), i.e. it sits at
// 90 or 270 degrees.
bool ScreenRecordImp::isDeviceRotated(int orientation) {
    switch (orientation) {
    case DISPLAY_ORIENTATION_0:
    case DISPLAY_ORIENTATION_180:
        return false;
    default:
        return true;
    }
}
// Computes and applies the mapping from the main display's layer stack onto
// the virtual display's output surface, preserving the display's aspect
// ratio and centering the content (letterbox/pillarbox as needed).
// Always returns NO_ERROR.
status_t ScreenRecordImp::setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {
    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".  If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;

    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!mRotate) {
        videoWidth = mVideoWidth;
        videoHeight = mVideoHeight;
    } else {
        videoWidth = mVideoHeight;
        videoHeight = mVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }

    // Center the scaled content inside the video frame.
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    SurfaceComposerClient::setDisplayProjection(dpy,
            mRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}
// Creates, configures, and starts a "video/avc" MediaCodec encoder in
// surface-input mode at mVideoWidth x mVideoHeight / mBitRate.
// On success returns 0 and hands back the codec plus the input surface the
// virtual display should render into; on failure the codec is released and
// an error code is returned.
status_t ScreenRecordImp::prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer)
{
    status_t err;

    sp<AMessage> format = new AMessage;
    format->setInt32("width", mVideoWidth);
    format->setInt32("height", mVideoHeight);
    format->setString("mime", "video/avc");
    // Opaque color format: frames arrive through the input surface,
    // not through client-supplied input buffers.
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", mBitRate);
    format->setFloat("frame-rate", displayFps);
    format->setInt32("i-frame-interval", 10); // seconds between sync frames

    sp<ALooper> looper = new ALooper;
    looper->setName("libscreenrecord_looper");
    looper->start();

    ALOGV("Creating codec");
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", true);
    if (codec == NULL) {
        fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
        return UNKNOWN_ERROR;
    }
    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err);
        return err;
    }

    ALOGV("Creating buffer producer");
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr,
                "ERROR: unable to create encoder input surface (err=%d)\n", err);
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}
// Creates a SurfaceFlinger virtual display that mirrors the main display's
// layer stack (stack 0) into the encoder's input surface.  On success
// returns NO_ERROR and hands back the display handle; the caller must
// release it with SurfaceComposerClient::destroyDisplay().
status_t ScreenRecordImp::prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("LibScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::openGlobalTransaction();
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    // Aspect-preserving projection; delegate to setDisplayProjection()
    // instead of duplicating the same geometry computation here (the
    // orientation-change path in runEncoder() uses the same helper).
    setDisplayProjection(dpy, mainDpyInfo);
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
    SurfaceComposerClient::closeGlobalTransaction();

    *pDisplayHandle = dpy;
    return NO_ERROR;
}
// Drains encoded output from the codec until stop() is requested or EOS is
// seen.  Every non-empty buffer -- including the codec-config (SPS/PPS)
// buffer -- is handed to the callback so the consumer receives a complete
// H.264 elementary stream.  Display orientation is polled once per frame
// and the virtual-display projection is updated when it changes.
// Returns NO_ERROR on a clean stop, otherwise the codec error.
status_t ScreenRecordImp::runEncoder(const sp<MediaCodec>& encoder,
        IScreenRecordCallback* callback, sp<IBinder>& mainDpy,
        sp<IBinder>& virtualDpy, uint8_t orientation)
{
    static const int kTimeout = 250000;     // usec; keeps stop() responsive
    status_t err;
    uint32_t debugNumFrames = 0;

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    DisplayInfo mainDpyInfo;

    mStopRequested = false;

    // Run until we're signaled via stop() (or the encoder reports EOS).
    while (!mStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                // Codec config (CSD) is deliberately passed through to
                // onData() like any other buffer, since there is no muxer.
                ALOGV("Got codec config buffer (%u bytes)", (unsigned) size);
            }
            if (size != 0) {
                ALOGV("Got data in buffer %d, size=%d, pts=%lld",
                        (int) bufIndex, (int) size, (long long) ptsUsec);

                // If the virtual display isn't providing us with timestamps,
                // use the current time.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                { // scope
                    // Check orientation, update projection if it changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d",
                                mainDpyInfo.orientation);
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                debugNumFrames++;
                ALOGV("Got codec NumFrames:%d", debugNumFrames);
                // Deliver the encoded bytes, honoring the buffer offset
                // (the original ignored `offset`, which is usually 0 but
                // is not guaranteed to be).
                callback->onData(buffers[bufIndex]->data() + offset, size);
            }

            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGD("Received end-of-stream");
                mStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            // The new format carries the CSD, which would matter for a
            // muxer; a raw-stream consumer gets the CSD via the
            // codec-config buffer above instead.
            ALOGV("Encoder format changed");
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:
            // not expected for an encoder; handle it anyway
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            fprintf(stderr, "Request for encoder buffer failed\n");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", mStopRequested);
    return NO_ERROR;
}
// Runs the full capture pipeline: starts the binder thread pool, queries the
// main display, prepares the encoder (retrying at 720p if the native size is
// rejected), mirrors the display into the encoder's input surface, and
// drains output until stop().  Returns 0 on success, otherwise an Android
// status_t error code.
int ScreenRecordImp::start(IScreenRecordCallback* callback)
{
    status_t err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }

    // Default the video size to the (rotation-corrected) display size.
    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (mVideoWidth == 0) {
        mVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (mVideoHeight == 0) {
        mVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> bufferProducer;
    err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
    if (err != NO_ERROR) {
        // Fallback is defined for landscape; swap if we're in portrait.
        // (The comparison was lost in the blog's HTML extraction; restored
        // from the AOSP screenrecord sources.)
        bool needSwap = mVideoWidth < mVideoHeight;
        uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
        uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
        if (mVideoWidth != newWidth && mVideoHeight != newHeight) {
            ALOGV("Retrying with 720p");
            fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                    mVideoWidth, mVideoHeight, newWidth, newHeight);
            mVideoWidth = newWidth;
            mVideoHeight = newHeight;
            err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
        }
    }
    if (err != NO_ERROR) {
        return err;
    }

    // Mirror the main display into the encoder's input surface.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        encoder->release();
        encoder.clear();
        return err;
    }

    // Main encoder loop.
    err = runEncoder(encoder, callback, mainDpy, dpy, mainDpyInfo.orientation);
    if (err != NO_ERROR) {
        // Fix: the original leaked the virtual display on this path.
        SurfaceComposerClient::destroyDisplay(dpy);
        encoder->release();
        encoder.clear();
        return err;
    }

    // Shut down the virtual display and the encoder.
    bufferProducer = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    encoder->stop();
    encoder->release();
    return 0;
}
// Requests the loop in runEncoder() to exit; the flag is polled once per
// dequeue timeout (250 ms), so shutdown is not instantaneous.
// NOTE(review): a plain bool written from another thread -- confirm the
// write is visible to the encoder loop on the target platform (consider
// volatile/atomic).
void ScreenRecordImp::stop()
{
mStopRequested = true;
}
用法class ScreenRecordCallback:public IScreenRecordCallback
{
public:
virtual ~ScreenRecordCallback(){}
virtual void onData(void* pData, size_t size)
{
}
};
int main(int arg, char** arv)
{
ScreenRecordCallback callback;
ScreenRecord recorder
recorder.start(&callback);
return 0;
}
开始抓取屏幕码流后,ScreenRecordCallback 的 onData 会被不断回调,把收到的数据依次存下来就是完整的 H.264 码流。
# Android.mk -- builds libscreenrecord.so
# (The original post fused the word "makefile" onto the first line, and the
# trailing backslash after screenrecord.cpp made the LOCAL_SHARED_LIBRARIES
# assignment part of LOCAL_SRC_FILES; both fixed here.)
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)

LOCAL_SRC_FILES := \
	screenrecord.cpp

LOCAL_SHARED_LIBRARIES := \
	libstagefright libmedia libutils libbinder libstagefright_foundation \
	libjpeg libgui libcutils liblog libEGL libGLESv2

LOCAL_C_INCLUDES := \
	frameworks/av/media/libstagefright \
	frameworks/av/media/libstagefright/include \
	$(TOP)/frameworks/native/include/media/openmax \
	external/jpeg

LOCAL_CFLAGS += -Wno-multichar
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE:= libscreenrecord

include $(BUILD_SHARED_LIBRARY)
后续继续实现Android推送码流到PC机,PC机能够解码显示。
原文:https://blog.csdn.net/star_ni/article/details/54948723