Android MediaRecorder System Architecture

Having analyzed the Camera implementation earlier, let's now look at how MediaRecorder is implemented. I won't pay much attention to its layering here; I'm more interested in its logic!

APP layer: /path/to/aosp/frameworks/base/media/java/android/media/MediaRecorder.java
JNI layer: /path/to/aosp/frameworks/base/media/jni/android_media_MediaRecorder.cpp
These call into the NATIVE layer's MediaRecorder (this is the BnMediaRecorderClient):
header: /path/to/aosp/frameworks/av/include/media/mediarecorder.h
implementation: /path/to/aosp/frameworks/av/media/libmedia/mediarecorder.cpp

MediaRecorder::MediaRecorder() : mSurfaceMediaSource(NULL)
{
    ALOGV("constructor");
 
    const sp<IMediaPlayerService>& service(getMediaPlayerService());
    if (service != NULL) {
        mMediaRecorder = service->createMediaRecorder(getpid());
    }
    if (mMediaRecorder != NULL) {
        mCurrentState = MEDIA_RECORDER_IDLE;
    }
 
    doCleanUp();
}

getMediaPlayerService() is declared in /path/to/aosp/frameworks/av/include/media/IMediaDeathNotifier.h.
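Its implementation (IMediaDeathNotifier.cpp in libmedia) simply asks the ServiceManager for the service registered under the name "media.player" and caches it. A condensed sketch (death-notification registration elided):

// IMediaDeathNotifier::getMediaPlayerService(), condensed.
// Blocks until mediaserver has published the "media.player" service.
/*static*/ const sp<IMediaPlayerService>&
IMediaDeathNotifier::getMediaPlayerService()
{
    Mutex::Autolock _l(sServiceLock);
    if (sMediaPlayerService == 0) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder;
        do {
            binder = sm->getService(String16("media.player"));
            if (binder != 0) {
                break;
            }
            ALOGW("Media player service not published, waiting...");
            usleep(500000); // retry after 0.5 seconds
        } while (true);
        // (death notifier registration elided)
        sMediaPlayerService = interface_cast<IMediaPlayerService>(binder);
    }
    return sMediaPlayerService;
}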

Once the MediaPlayerService (a BpMediaPlayerService) is in hand, the constructor calls the following IMediaPlayerService method, which executes on the service side:

sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(pid_t pid)
{
    sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid);
    wp<MediaRecorderClient> w = recorder;
    Mutex::Autolock lock(mLock);
    mMediaRecorderClients.add(w);
    ALOGV("Create new media recorder client from pid %d", pid);
    return recorder;
}

This creates a MediaRecorderClient (a BnMediaRecorder).

What the client gets back through binder, however, is a BpMediaRecorder,
because of the following interface_cast on the proxy side:

virtual sp<IMediaRecorder> createMediaRecorder(pid_t pid)
{
    Parcel data, reply;
    data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
    data.writeInt32(pid);
    remote()->transact(CREATE_MEDIA_RECORDER, data, &reply);
    return interface_cast<IMediaRecorder>(reply.readStrongBinder());
}
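There is no magic in interface_cast<IMediaRecorder>(): it expands (via the DECLARE/IMPLEMENT_META_INTERFACE macros in IInterface.h) to IMediaRecorder::asInterface(), and since the binder handle in the client process is remote, a BpMediaRecorder gets created. Roughly:

// What IMPLEMENT_META_INTERFACE(MediaRecorder, ...) expands to, condensed.
sp<IMediaRecorder> IMediaRecorder::asInterface(const sp<IBinder>& obj)
{
    sp<IMediaRecorder> intr;
    if (obj != NULL) {
        // Non-NULL only when the binder lives in this process (it would be
        // the BnMediaRecorder itself); across processes this returns NULL.
        intr = static_cast<IMediaRecorder*>(
            obj->queryLocalInterface(IMediaRecorder::descriptor).get());
        if (intr == NULL) {
            intr = new BpMediaRecorder(obj);
        }
    }
    return intr;
}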

MediaRecorderClient in turn creates a StagefrightRecorder (a MediaRecorderBase), which lives at
/path/to/aosp/frameworks/av/media/libmediaplayerservice/StagefrightRecorder.cpp.

For now we can treat the APP/JNI/NATIVE side as living in one process, and the MediaRecorderClient/StagefrightRecorder inside MediaPlayerService as living in another; the two talk over binder. We have now seen both the Bp and the Bn ends, so from here on I won't carefully distinguish Bp from Bn.

On the client side:
BnMediaRecorderClient
BpMediaRecorder
BpMediaPlayerService

On the service side:
BpMediaRecorderClient (the service obtains this Bp when it needs to notify the client)
BnMediaRecorder
BnMediaPlayerService

(Diagram: Android MediaRecorder architecture; see the original post for the full-size image.)

Let's take starting a recording, i.e. start(), as an example.

Here the work splits into two paths: a CameraSource on one side and an MPEG4Writer (the sp<MediaWriter> mWriter member) on the other.
Both classes live under /path/to/aosp/frameworks/av/media/libstagefright/.
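Before following the two paths, it is worth seeing how start() gets from the app's process into StagefrightRecorder in the first place. A condensed sketch of the two hops (state checks and error handling trimmed):

// 1. Native MediaRecorder, client process (mediarecorder.cpp):
//    mMediaRecorder is the BpMediaRecorder, so this is a binder call.
status_t MediaRecorder::start()
{
    ...
    status_t ret = mMediaRecorder->start();
    ...
}

// 2. MediaRecorderClient, mediaserver process (MediaRecorderClient.cpp):
//    mRecorder is the StagefrightRecorder; its start() dispatches on the
//    output format, and for MPEG-4 lands in startMPEG4Recording() below.
status_t MediaRecorderClient::start()
{
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL) {
        ALOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->start();
}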

status_t StagefrightRecorder::startMPEG4Recording() {
    int32_t totalBitRate;
    status_t err = setupMPEG4Recording(
            mOutputFd, mVideoWidth, mVideoHeight,
            mVideoBitRate, &totalBitRate, &mWriter);
    if (err != OK) {
        return err;
    }
 
    int64_t startTimeUs = systemTime() / 1000;
    sp<MetaData> meta = new MetaData;
    setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
 
    err = mWriter->start(meta.get());
    if (err != OK) {
        return err;
    }
 
    return OK;
}
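startMPEG4Recording() delegates the file-level parameters to setupMPEG4MetaData(), which is not quoted in this post. A condensed sketch, based on the same era of StagefrightRecorder.cpp (the exact field set varies by version; optional fields trimmed):

void StagefrightRecorder::setupMPEG4MetaData(
        int64_t startTimeUs, int32_t totalBitRate, sp<MetaData> *meta) {
    (*meta)->setInt64(kKeyTime, startTimeUs);
    (*meta)->setInt32(kKeyFileType, mOutputFormat);
    (*meta)->setInt32(kKeyBitRate, totalBitRate);
    (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
    if (mMovieTimeScale > 0) {
        (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
    }
    ...
}

setupMPEG4Recording() itself comes next: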
status_t StagefrightRecorder::setupMPEG4Recording(
        int outputFd,
        int32_t videoWidth, int32_t videoHeight,
        int32_t videoBitRate,
        int32_t *totalBitRate,
        sp<MediaWriter> *mediaWriter) {
    mediaWriter->clear();
    *totalBitRate = 0;
    status_t err = OK;
    sp<MediaWriter> writer = new MPEG4Writer(outputFd);
 
    if (mVideoSource < VIDEO_SOURCE_LIST_END) {
 
        sp<MediaSource> mediaSource;
        err = setupMediaSource(&mediaSource); // very important
        if (err != OK) {
            return err;
        }
 
        sp<MediaSource> encoder;
        err = setupVideoEncoder(mediaSource, videoBitRate, &encoder); // very important
        if (err != OK) {
            return err;
        }
 
        writer->addSource(encoder);
        *totalBitRate += videoBitRate;
    }
 
    // Audio source is added at the end if it exists.
    // This help make sure that the "recoding" sound is suppressed for
    // camcorder applications in the recorded files.
    if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
        err = setupAudioEncoder(writer); // very important
        if (err != OK) return err;
        *totalBitRate += mAudioBitRate;
    }
 
    ...
 
    writer->setListener(mListener);
    *mediaWriter = writer;
    return OK;
}
// Set up the appropriate MediaSource depending on the chosen option
status_t StagefrightRecorder::setupMediaSource(
                      sp<MediaSource> *mediaSource) {
    if (mVideoSource == VIDEO_SOURCE_DEFAULT
            || mVideoSource == VIDEO_SOURCE_CAMERA) {
        sp<CameraSource> cameraSource;
        status_t err = setupCameraSource(&cameraSource);
        if (err != OK) {
            return err;
        }
        *mediaSource = cameraSource;
    } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
        // If using GRAlloc buffers, setup surfacemediasource.
        // Later a handle to that will be passed
        // to the client side when queried
        status_t err = setupSurfaceMediaSource();
        if (err != OK) {
            return err;
        }
        *mediaSource = mSurfaceMediaSource;
    } else {
        return INVALID_OPERATION;
    }
    return OK;
}
status_t StagefrightRecorder::setupCameraSource(
        sp<CameraSource> *cameraSource) {
    status_t err = OK;
    if ((err = checkVideoEncoderCapabilities()) != OK) {
        return err;
    }
    Size videoSize;
    videoSize.width = mVideoWidth;
    videoSize.height = mVideoHeight;
    if (mCaptureTimeLapse) {
        if (mTimeBetweenTimeLapseFrameCaptureUs < 0) {
            ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld",
                mTimeBetweenTimeLapseFrameCaptureUs);
            return BAD_VALUE;
        }
 
        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId,
                videoSize, mFrameRate, mPreviewSurface,
                mTimeBetweenTimeLapseFrameCaptureUs);
        *cameraSource = mCameraSourceTimeLapse;
    } else {
        *cameraSource = CameraSource::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate,
                mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
    }
    mCamera.clear();
    mCameraProxy.clear();
    if (*cameraSource == NULL) {
        return UNKNOWN_ERROR;
    }
 
    if ((*cameraSource)->initCheck() != OK) {
        (*cameraSource).clear();
        *cameraSource = NULL;
        return NO_INIT;
    }
 
    // When frame rate is not set, the actual frame rate will be set to
    // the current frame rate being used.
    if (mFrameRate == -1) {
        int32_t frameRate = 0;
        CHECK ((*cameraSource)->getFormat()->findInt32(
                    kKeyFrameRate, &frameRate));
        ALOGI("Frame rate is not explicitly set. Use the current frame "
             "rate (%d fps)", frameRate);
        mFrameRate = frameRate;
    }
 
    CHECK(mFrameRate != -1);
 
    mIsMetaDataStoredInVideoBuffers =
        (*cameraSource)->isMetaDataStoredInVideoBuffers();
 
    return OK;
}
status_t StagefrightRecorder::setupVideoEncoder(
        sp<MediaSource> cameraSource,
        int32_t videoBitRate,
        sp<MediaSource> *source) {
    source->clear();
 
    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setInt32(kKeyBitRate, videoBitRate);
    enc_meta->setInt32(kKeyFrameRate, mFrameRate);
 
    switch (mVideoEncoder) {
        case VIDEO_ENCODER_H263:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
            break;
 
        case VIDEO_ENCODER_MPEG_4_SP:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
            break;
 
        case VIDEO_ENCODER_H264:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
            break;
 
        default:
            CHECK(!"Should not be here, unsupported video encoding.");
            break;
    }
 
    sp<MetaData> meta = cameraSource->getFormat();
 
    int32_t width, height, stride, sliceHeight, colorFormat;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));
    CHECK(meta->findInt32(kKeyStride, &stride));
    CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
 
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
    enc_meta->setInt32(kKeyStride, stride);
    enc_meta->setInt32(kKeySliceHeight, sliceHeight);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);
    if (mVideoTimeScale > 0) {
        enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
    }
    if (mVideoEncoderProfile != -1) {
        enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
    }
    if (mVideoEncoderLevel != -1) {
        enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
    }
 
    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);
 
    uint32_t encoder_flags = 0;
    if (mIsMetaDataStoredInVideoBuffers) {
        encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
    }
 
    // Do not wait for all the input buffers to become available.
    // This give timelapse video recording faster response in
    // receiving output from video encoder component.
    if (mCaptureTimeLapse) {
        encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
    }
 
    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta,
            true /* createEncoder */, cameraSource,
            NULL, encoder_flags);
    if (encoder == NULL) {
        ALOGW("Failed to create the encoder");
        // When the encoder fails to be created, we need
        // release the camera source due to the camera's lock
        // and unlock mechanism.
        cameraSource->stop();
        return UNKNOWN_ERROR;
    }
 
    *source = encoder;
 
    return OK;
}

This is where things hook up with OMXCodec.
A configuration file named media_codecs.xml declares which codecs a device supports.
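Its shape is roughly as follows; this is an illustrative excerpt only (component names differ per device and vendor, and real entries can carry <Quirk/> children):

<MediaCodecs>
    <Encoders>
        <MediaCodec name="OMX.qcom.video.encoder.avc" type="video/avc" />
        <MediaCodec name="OMX.google.h264.encoder" type="video/avc" />
    </Encoders>
    <Decoders>
        <MediaCodec name="OMX.google.h264.decoder" type="video/avc" />
    </Decoders>
</MediaCodecs>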

When we record MPEG-4 there is sound as well, so setupAudioEncoder is called afterwards. I won't walk through it in detail; in short, it adds the audio as one more Track in the MPEG4Writer (a condensed sketch follows below).
A side note: Google says setupAudioEncoder is deliberately done last so that the beep played when recording starts doesn't end up in the file, but in practice this still has a bug: on some devices the beep gets recorded anyway, and every case of this I've seen is worked around by having the app play the sound itself.
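Here is that condensed sketch of setupAudioEncoder(), from the same StagefrightRecorder.cpp (capability checks trimmed); createAudioSource() builds the AudioSource plus the matching OMXCodec audio encoder:

status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
    switch (mAudioEncoder) {
        case AUDIO_ENCODER_AMR_NB:
        case AUDIO_ENCODER_AMR_WB:
        case AUDIO_ENCODER_AAC:
            break;
        default:
            ALOGE("Unsupported audio encoder: %d", mAudioEncoder);
            return UNKNOWN_ERROR;
    }

    sp<MediaSource> audioEncoder = createAudioSource();
    if (audioEncoder == NULL) {
        return UNKNOWN_ERROR;
    }

    // The audio track rides along in the same writer as the video track.
    writer->addSource(audioEncoder);
    return OK;
}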

In addition, MPEG4Writer's
start(MetaData*)
kicks off two things:
a) startWriterThread

which starts a thread that does the actual writing:

void MPEG4Writer::threadFunc() {
    ALOGV("threadFunc");
 
    prctl(PR_SET_NAME, (unsigned long)"MPEG4Writer", 0, 0, 0);
 
    Mutex::Autolock autoLock(mLock);
    while (!mDone) {
        Chunk chunk;
        bool chunkFound = false;
 
        while (!mDone && !(chunkFound = findChunkToWrite(&chunk))) {
            mChunkReadyCondition.wait(mLock);
        }
 
        // Actual write without holding the lock in order to
        // reduce the blocking time for media track threads.
        if (chunkFound) {
            mLock.unlock();
            writeChunkToFile(&chunk);
            mLock.lock();
        }
    }
 
    writeAllChunks();
}
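The producer side of this handshake is each Track calling bufferChunk(), which queues a Chunk with the writer and signals mChunkReadyCondition; condensed from the same MPEG4Writer.cpp:

void MPEG4Writer::Track::bufferChunk(int64_t timestampUs) {
    Chunk chunk(this, timestampUs, mChunkSamples);
    mOwner->bufferChunk(chunk);
    mChunkSamples.clear();
}

void MPEG4Writer::bufferChunk(const Chunk& chunk) {
    Mutex::Autolock autolock(mLock);
    for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
         it != mChunkInfos.end(); ++it) {
        if (chunk.mTrack == it->mTrack) {   // found the owning track
            it->mChunks.push_back(chunk);
            mChunkReadyCondition.signal();  // wakes up threadFunc() above
            return;
        }
    }
    CHECK(!"Received a chunk for an unknown track");
}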

b) startTracks

status_t MPEG4Writer::startTracks(MetaData *params) {
    for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
        status_t err = (*it)->start(params);
 
        if (err != OK) {
            for (List<Track *>::iterator it2 = mTracks.begin();
                 it2 != it; ++it2) {
                (*it2)->stop();
            }
 
            return err;
        }
    }
    return OK;
}

This in turn calls each Track's start() method:

status_t MPEG4Writer::Track::start(MetaData *params) {
    ...
 
    initTrackingProgressStatus(params);
 
    ...
 
    status_t err = mSource->start(meta.get()); // this runs CameraSource::start(); the source and the track are tied together here
 
    ...
 
    pthread_create(&mThread, &attr, ThreadWrapper, this);
    return OK;
}
 
void *MPEG4Writer::Track::ThreadWrapper(void *me) {
    Track *track = static_cast<Track *>(me);
 
    status_t err = track->threadEntry();
    return (void *) err;
}

status_t MPEG4Writer::Track::threadEntry() runs on yet another newly started thread. It loops, continually read()ing data out of the CameraSource and writing it into the file. The CameraSource's data of course comes back from the driver (see CameraSourceListener: CameraSource keeps a List named mFramesReceived dedicated to frames arriving from the driver, and calls mFrameAvailableCondition.signal when data arrives; frames received before recording has started are simply dropped; and note that MediaWriter starts CameraSource's start() method first, and only then starts writing tracks).
Note: strictly speaking, what MPEG4Writer reads here is OMXCodec's output. Data first reaches CameraSource, the codec encodes it, and only then does MPEG4Writer write it to the file. For how data travels between CameraSource/OMXCodec/MPEG4Writer, see the discussion of buffer transfer in http://guoh.org/lifelog/2013/06/interaction-between-stagefright-and-codec/.
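To make the CameraSource side concrete, here is a condensed sketch of CameraSource::read() (seek and wait-timeout handling trimmed): it blocks on mFrameAvailableCondition, pops the oldest frame off mFramesReceived, and hands it to the encoder wrapped in a MediaBuffer.

status_t CameraSource::read(MediaBuffer **buffer, const ReadOptions *options) {
    *buffer = NULL;
    sp<IMemory> frame;
    int64_t frameTime;
    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            mFrameAvailableCondition.wait(mLock);   // filled by the listener
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());
        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);       // held until encoded, see below
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}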

Looking back, what does Stagefright actually do? I see it mostly as glue: it sits at the MediaPlayerService layer and binds MediaSource, MediaWriter, the Codec, and the MediaRecorder above them together. That is probably its biggest contribution, and using it to replace OpenCore fits Google's usual engineering-first style (as opposed to elaborate academic designs; plenty of Google's work is complex too, but it generally solves problems in the simplest way it can).
What feels a little odd is that MediaRecorder lives inside MediaPlayerService; the two look like opposites. Maybe one day they'll be renamed, or split apart, who knows!

Of course this is only a rough overview; I'll try to analyze the codec side separately later.

Some details were left out above; here are a few points worth noting:

1. Time-lapse recording
The CameraSource counterpart here is CameraSourceTimeLapse.

Concretely, dataCallbackTimestamp
contains the skipCurrentFrame decision.

It keeps a few variables for the bookkeeping:
mTimeBetweenTimeLapseVideoFramesUs (1E6/videoFrameRate) // the interval between two output frames
mLastTimeLapseFrameRealTimestampUs // when the last kept frame actually arrived
From the frame rate it computes how far apart two kept frames must be, and every frame in between is dropped via releaseOneRecordingFrame.
In other words, what the driver delivers is untouched; we simply discard frames in software.
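A condensed sketch of that decision, from CameraSourceTimeLapse::skipFrameAndModifyTimeStamp() (quick-stop and first-frames special cases trimmed):

// Returns true if the current frame should be dropped.
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // Very first frame: always keep it.
        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }
    if (*timestampUs < (mLastTimeLapseFrameRealTimestampUs +
                        mTimeBetweenTimeLapseFrameCaptureUs)) {
        // Not enough real time has passed yet: drop the frame
        // (the caller answers with releaseOneRecordingFrame()).
        return true;
    }
    // Keep the frame, and remap its timestamp so that on playback the
    // kept frames appear only 1/frameRate apart, i.e. time "lapses".
    mLastTimeLapseFrameRealTimestampUs = *timestampUs;
    *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    return false;
}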

For background on time-lapse photography, see
https://en.wikipedia.org/wiki/Time-lapse_photography

2. When recording needs to use the Camera, it goes through ICameraRecordingProxy, i.e. the RecordingProxy inside Camera (a BnCameraRecordingProxy).
Once this ICameraRecordingProxy has crossed binder into the service process, it becomes a Bp, as below:

case SET_CAMERA: {
    ALOGV("SET_CAMERA");
    CHECK_INTERFACE(IMediaRecorder, data, reply);
    sp<ICamera> camera = interface_cast<ICamera>(data.readStrongBinder());
    sp<ICameraRecordingProxy> proxy =
        interface_cast<ICameraRecordingProxy>(data.readStrongBinder());
    reply->writeInt32(setCamera(camera, proxy));
    return NO_ERROR;
} break;

CameraSource then uses it like this:

// We get the proxy from Camera, not ICamera. We need to get the proxy
// to the remote Camera owned by the application. Here mCamera is a
// local Camera object created by us. We cannot use the proxy from
// mCamera here.
mCamera = Camera::create(camera);
if (mCamera == 0) return -EBUSY;
mCameraRecordingProxy = proxy;
mCameraFlags |= FLAGS_HOT_CAMERA;

Open questions:

What is this member of CameraSource for?
List<sp<IMemory> > mFramesBeingEncoded;
Every frame handed to the encoder is stashed here, and only when the corresponding buffer is released are these frame(s) released in turn. Is this done for efficiency? Why not release each frame as soon as it has been encoded? (See the sketch of the release path below.)
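That release path, condensed from CameraSource::signalBufferReturned(): when the encoder is done with a MediaBuffer, the matching IMemory is looked up in mFramesBeingEncoded and only then handed back to the camera, so the driver's buffer cannot be recycled while the encoder may still be reading from it.

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame(*it);  // return the buffer to the driver
            mFramesBeingEncoded.erase(it);
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}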
And once again I can't help marveling at Google's habitual delete this; ingenious, but it does look unusual!


Original post: http://guoh.org/lifelog/2013/06/android-mediarecorder-architecture/
