Android CCodec Codec2 (10) Codec2Client

Codec2Client hides the details of calling into the Codec2 HAL: it re-abstracts and wraps the HAL interfaces and exposes the result to the sfPlugin layer. In this article we take a brief look at the design and structure of Codec2Client.

1. Codec2Client

A comment in Codec2Client sums it up: Codec2Client is the core class, and it contains four important inner classes: Listener, Configurable, Interface, and Component. The framework designers apparently aimed for a unified design in which every HAL API wrapper is defined as an inner class of Codec2Client. The goal of this article is to sort out the roles of, and the relationships between, the core class and its four inner classes.

Codec2Client is defined as follows (abridged here):

struct Codec2Client : public Codec2ConfigurableClient {
    typedef ::android::hardware::media::c2::V1_0::IComponentStore Base1_0;
    typedef ::android::hardware::media::c2::V1_1::IComponentStore Base1_1;
    typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
    typedef Base1_0 Base;

    typedef ::android::hardware::media::c2::V1_0::IConfigurable IConfigurable;
    struct Listener;
    typedef Codec2ConfigurableClient Configurable;
    struct Component;
    struct Interface;

    typedef Codec2Client Store;

    static std::shared_ptr<Codec2Client> CreateFromService(
            char const* name,
            bool setAsPreferredCodec2ComponentStore = false);
protected:
    sp<Base1_0> mBase1_0;
    sp<Base1_1> mBase1_1;
    sp<Base1_2> mBase1_2;
    // other members omitted
};

Codec2Client wraps the API of the Codec2 service. The first parameter of CreateFromService, name, selects whether the SW or the HW Codec2 service is fetched. Let's first look at the GetServiceNames method, which enumerates all Codec2 services available on the platform:

std::vector<std::string> const& Codec2Client::GetServiceNames() {
    static std::vector<std::string> sServiceNames{[]() {
        using ::android::hardware::media::c2::V1_0::IComponentStore;
        using ::android::hidl::manager::V1_2::IServiceManager;

        while (true) {
            sp<IServiceManager> serviceManager = IServiceManager::getService();

            std::vector<std::string> defaultNames; // Prefixed with "default"
            std::vector<std::string> vendorNames;  // Prefixed with "vendor"
            std::vector<std::string> otherNames;   // Others
            Return<void> transResult;
            transResult = serviceManager->listManifestByInterface(
                    IComponentStore::descriptor,
                    [&defaultNames, &vendorNames, &otherNames](
                            hidl_vec<hidl_string> const& instanceNames) {
                        for (hidl_string const& instanceName : instanceNames) {
                            char const* name = instanceName.c_str();
                            if (strncmp(name, "default", 7) == 0) {
                                defaultNames.emplace_back(name);
                            } else if (strncmp(name, "vendor", 6) == 0) {
                                vendorNames.emplace_back(name);
                            } else {
                                otherNames.emplace_back(name);
                            }
                        }
                    });
            // ...
        }
    }()};
    // All discovered service names are recorded in this static list
    return sServiceNames;
}

GetServiceNames asks the HW service manager for every service registered in the manifest with the descriptor android.hardware.media.c2@1.0::IComponentStore, collecting the names of all declared instances. The SW codec service is named software, while the HW codec service is usually named default.

All of these names are recorded in the static list sServiceNames. When a component is created, the requested service name is first looked up in sServiceNames; only if it is found is the corresponding service actually fetched.
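The lookup itself is a small helper named getServiceIndex. The following is a minimal sketch of what it does, inferred from its call site below rather than copied verbatim from Codec2Client.cpp:

// Sketch (assumption): a plain linear search over GetServiceNames().
// Returning names.size() signals that the requested name is unknown.
static size_t getServiceIndex(char const* name) {
    std::vector<std::string> const& names = Codec2Client::GetServiceNames();
    size_t i = 0;
    for (; i < names.size(); ++i) {
        if (name == names[i]) {
            break;
        }
    }
    return i;
}

CreateFromService turns the requested name into an index and bails out early when the name is unknown: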

std::shared_ptr<Codec2Client> Codec2Client::CreateFromService(
        const char* name,
        bool setAsPreferredCodec2ComponentStore) {
    // Look the name up in sServiceNames
    size_t index = getServiceIndex(name);
    if (index == GetServiceNames().size()) {
        return nullptr;
    }
    // Fetch the service and create the Codec2Client
    std::shared_ptr<Codec2Client> client = _CreateFromIndex(index);
    if (setAsPreferredCodec2ComponentStore) {
        SetPreferredCodec2ComponentStore(
                std::make_shared<Client2Store>(client));
    }
    return client;
}

std::shared_ptr<Codec2Client> Codec2Client::_CreateFromIndex(size_t index) {
    std::string const& name = GetServiceNames()[index];
    // Fetch the service
    sp<Base> baseStore = Base::getService(name);
    // Fetch its IConfigurable
    Return<sp<IConfigurable>> transResult = baseStore->getConfigurable();
    sp<IConfigurable> configurable = static_cast<sp<IConfigurable>>(transResult);
    // Create the Codec2Client instance
    return std::make_shared<Codec2Client>(baseStore, configurable, index);
}

Base is defined in Codec2Client as ::android::hardware::media::c2::V1_0::IComponentStore, so baseStore is in fact an IComponentStore proxy. Once the service is obtained, getConfigurable is called on the IComponentStore, and the IComponentStore and IConfigurable objects are then used together to construct the Codec2Client instance.
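To make the flow concrete, here is a hypothetical caller. The fallback policy is purely illustrative; only the CreateFromService signature comes from the header above:

// Hypothetical usage sketch: prefer the HW store (usually named "default"),
// then fall back to the SW store ("software") and mark it as preferred.
std::shared_ptr<Codec2Client> client =
        Codec2Client::CreateFromService("default");
if (!client) {
    client = Codec2Client::CreateFromService(
            "software", /* setAsPreferredCodec2ComponentStore = */ true);
}

The constructor invoked at the end of _CreateFromIndex looks like this: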

Codec2Client::Codec2Client(sp<Base> const& base,
                           sp<IConfigurable> const& configurable,
                           size_t serviceIndex)
      : Configurable{configurable},
        mBase1_0{base},
        mBase1_1{Base1_1::castFrom(base)},
        mBase1_2{Base1_2::castFrom(base)},
        mServiceIndex{serviceIndex} {
    Return<sp<IClientManager>> transResult = base->getPoolClientManager();
    if (!transResult.isOk()) {
        LOG(ERROR) << "getPoolClientManager -- transaction failed.";
    } else {
        mHostPoolManager = static_cast<sp<IClientManager>>(transResult);
    }
}

The Codec2Client constructor does two things:

  1. It invokes the constructor of the base class Codec2ConfigurableClient, passing in the IConfigurable object;
  2. It calls IComponentStore::getPoolClientManager and caches the returned bufferpool IClientManager. Keep this step in mind; it will come up again later.

Note also that the initializer list probes for the V1.1 and V1.2 interfaces via castFrom, as sketched below.
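A short sketch of that probing, assuming standard HIDL castFrom semantics (a null proxy is returned when the remote service only implements an older interface version):

// Sketch of the version probing done in the initializer list above.
// Every later mBase1_1/mBase1_2 access must therefore be null-checked.
sp<Codec2Client::Base1_0> base = Codec2Client::Base1_0::getService("default");
sp<Codec2Client::Base1_1> base1_1 = Codec2Client::Base1_1::castFrom(base);
sp<Codec2Client::Base1_2> base1_2 = Codec2Client::Base1_2::castFrom(base);
if (!base1_2) {
    // V1.2-only APIs are unavailable on this device.
}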

That completes the creation of Codec2Client. If the second parameter of CreateFromService is true, SetPreferredCodec2ComponentStore is additionally called:

void SetPreferredCodec2ComponentStore(std::shared_ptr<C2ComponentStore> componentStore) {
    static std::mutex mutex;
    std::lock_guard<std::mutex> lock(mutex); // don't interleave set-s

    // update preferred store
    {
        std::lock_guard<std::mutex> lock(gPreferredComponentStoreMutex);
        gPreferredComponentStore = componentStore;
    }

    // update platform allocator's store as well if it is alive
    std::shared_ptr<C2PlatformAllocatorStoreImpl> allocatorStore;
    {
        std::lock_guard<std::mutex> lock(gPlatformAllocatorStoreMutex);
        allocatorStore = gPlatformAllocatorStore.lock();
    }
    if (allocatorStore) {
        allocatorStore->setComponentStore(componentStore);
    }
}

SetPreferredCodec2ComponentStore records the C2ComponentStore handed to it (here, the Codec2Client wrapped in a Client2Store) in the global gPreferredComponentStore, and also pushes it into the platform allocator store if that store is still alive.
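For completeness, here is a sketch of the reader side. It assumes the matching getter GetPreferredCodec2ComponentStore(), declared alongside the setter in C2PlatformSupport.h:

// Sketch (assumption: GetPreferredCodec2ComponentStore() is the getter
// paired with the setter above).
std::shared_ptr<C2ComponentStore> preferred = GetPreferredCodec2ComponentStore();
if (preferred) {
    // Platform allocator/block-pool code resolves its component store
    // through this preferred instance.
}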

2. Codec2ConfigurableClient

In Codec2, ComponentStore, Component, and Interface are all modules that carry parameter configuration: the upper layer can call query, config, and related methods to read or modify the parameters each module defines. The framework designers factored this capability out into the Codec2ConfigurableClient class, and the client wrappers of the three modules above all inherit from it.

struct Codec2ConfigurableClient {
    typedef ::android::hardware::media::c2::V1_0::IConfigurable Base;

    const C2String& getName() const;

    c2_status_t query(
            const std::vector<C2Param*>& stackParams,
            const std::vector<C2Param::Index> &heapParamIndices,
            c2_blocking_t mayBlock,
            std::vector<std::unique_ptr<C2Param>>* const heapParams) const;

    c2_status_t config(
            const std::vector<C2Param*> &params,
            c2_blocking_t mayBlock,
            std::vector<std::unique_ptr<C2SettingResult>>* const failures);

    c2_status_t querySupportedParams(
            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
            ) const;

    c2_status_t querySupportedValues(
            std::vector<C2FieldSupportedValuesQuery>& fields,
            c2_blocking_t mayBlock) const;

    // base cannot be null.
    Codec2ConfigurableClient(const sp<Base>& base);

protected:
    sp<Base> mBase;
    C2String mName;

    friend struct Codec2Client;
};

Codec2ConfigurableClient provides the following capabilities, mirroring the declaration above:

  - getName: returns the name of the configurable module;
  - query: reads parameter values, either into caller-provided stack parameters or as heap-allocated copies;
  - config: applies parameter changes and collects per-parameter failures;
  - querySupportedParams: lists descriptors for all parameters the module supports;
  - querySupportedValues: queries which values specific parameter fields accept.
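To see these methods in action, here is a minimal usage sketch. It assumes a variable configurable of type std::shared_ptr<Codec2ConfigurableClient> (for example, a component's interface) and uses two standard parameter types from C2Config.h:

// Sketch: set the output bitrate, then read back the output picture size,
// using the query/config signatures declared above.
C2StreamBitrateInfo::output bitrate(0u /* stream */, 2000000 /* bps */);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = configurable->config({&bitrate}, C2_MAY_BLOCK, &failures);

C2StreamPictureSizeInfo::output size(0u);
err = configurable->query({&size} /* stackParams, filled in place */,
                          {} /* heapParamIndices */,
                          C2_MAY_BLOCK,
                          nullptr /* heapParams */);

Stack parameters are passed by pointer so that query can fill them in place; failures collects a C2SettingResult for every parameter the module rejects.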

3. Codec2Client::Component


