Introduction to Android Codec2 Encoding

Google introduced Codec2.0 in Android Q with the goal of replacing ACodec and OpenMAX. It can be viewed as a new middleware layer behind MediaCodec: it connects to the MediaCodec native layer above and exposes a new API standard for codec implementations below, effectively acting as "ACodec 2.0".
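As a quick, hedged illustration of the new client API (not from the original post), the snippet below lists the Codec2 components registered on a device via android::Codec2Client::ListComponents, the same client library the encoder example later in this article uses. It assumes an Android platform build that links the Codec2 client library.

#include <codec2/hidl/client.h>
#include <cstdio>
#include <vector>

// Hypothetical helper: print every Codec2 component advertised by the device,
// e.g. "c2.android.avc.encoder (video/avc)".
int main()
{
    std::vector<C2Component::Traits> traits = android::Codec2Client::ListComponents();
    for (const C2Component::Traits &trait : traits) {
        fprintf(stderr, "%s (%s)\n", trait.name.c_str(), trait.mediaType.c_str());
    }
    return 0;
}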

Codec2 encoding example (the C2Encoder.h header, followed by the C2Encoder.cpp implementation):

#ifndef __C2_ENCODER_H__
#define __C2_ENCODER_H__

#include <C2AllocatorGralloc.h>
#include <C2AllocatorIon.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Component.h>
#include <C2Config.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <C2Work.h>
#include <codec2/hidl/client.h>
#include <hidl/HidlSupport.h>
//#include <media/NdkMediaCodec.h>

#include <vector>
#include <iostream>
#include <fstream>
#include <stdio.h>
#include <algorithm>

//#include "C2Common.h"

#ifdef __cplusplus
extern "C"
{
#endif

#include "gstcodec2enc.hh"

#ifdef __cplusplus
}
#endif

using namespace std;

#define MAX_RETRY 20

#define TIME_OUT 400ms

#define MAX_INPUT_BUFFERS 8

#define DEFAULT_AUDIO_FRAME_SIZE 4096

constexpr int32_t KDefaultFrameRate = 25;

typedef void (*DataCb)(GstCodec2enc* codec2enc,const uint8_t* data,int size,long int timestamp);

using android::C2AllocatorIon;

class LinearBuffer : public C2Buffer {
  public:
    explicit LinearBuffer(const std::shared_ptr<C2LinearBlock> &block)
        : C2Buffer({block->share(block->offset(), block->size(), ::C2Fence())}) {}

    explicit LinearBuffer(const std::shared_ptr<C2LinearBlock> &block, size_t size)
        : C2Buffer({block->share(block->offset(), size, ::C2Fence())}) {}
};

class GraphicBuffer : public C2Buffer {
  public:
    explicit GraphicBuffer(const std::shared_ptr<C2GraphicBlock> &block)
        : C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
};

/**
 * Handle Callback functions onWorkDone(), onTripped(),
 * onError(), onDeath(), onFramesRendered() for C2 Components
 */
struct CodecListener : public android::Codec2Client::Listener {
  public:
    CodecListener(
            const std::function<void(std::list<std::unique_ptr<C2Work>> &workItems)> fn = nullptr)
        : callBack(fn) {}
    virtual void onWorkDone(const std::weak_ptr<android::Codec2Client::Component> &comp,
                            std::list<std::unique_ptr<C2Work>> &workItems) override {
        ALOGV("onWorkDone called");
        (void)comp;
        if (callBack) callBack(workItems);
    }

    virtual void onTripped(
            const std::weak_ptr<android::Codec2Client::Component> &comp,
            const std::vector<std::shared_ptr<C2SettingResult>> &settingResults) override {
        (void)comp;
        (void)settingResults;
    }

    virtual void onError(const std::weak_ptr<android::Codec2Client::Component> &comp,
                         uint32_t errorCode) override {
        (void)comp;
        ALOGV("onError called");
        if (errorCode != 0) ALOGE("Error : %u", errorCode);
    }

    virtual void onDeath(const std::weak_ptr<android::Codec2Client::Component> &comp) override {
        (void)comp;
    }

    virtual void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex) override {
        (void)frameIndex;
        (void)arrayIndex;
    }

    virtual void onFrameRendered(uint64_t bufferQueueId, int32_t slotId,
                                 int64_t timestampNs) override {
        (void)bufferQueueId;
        (void)slotId;
        (void)timestampNs;
    }

    std::function<void(std::list<std::unique_ptr<C2Work>> &workItems)> callBack;
};

class C2Encoder{
  public:
    C2Encoder()
        : mEos(false),
          mIsAudioEncoder(false),
          mMime(""),
          mProfile(C2Config::PROFILE_UNUSED),
          mLevel(C2Config::LEVEL_UNUSED),
          mWidth(0),
          mHeight(0),
          mFrameRate(KDefaultFrameRate),
          mBitRate(0),
          mSampleRate(0),
          mColorFormat(0),
          mNumInputFrame(0),
          mInputMaxBufSize(0),
          mChannelNum(0),
          mClient(nullptr),
          mBlockPoolId(0),
          mLinearPool(nullptr),
          mGraphicPool(nullptr),
          mLinearAllocator(nullptr),
          mGraphicAllocator(nullptr),
          mComponent(nullptr),
          onProcessDataCb(nullptr),
          mCodec2enc(nullptr)
    {
        GST_DEBUG("Construct C2Encoder\n");
    }

    ~C2Encoder()
    {
        GST_DEBUG("~C2Encoder");
    }

    int32_t createCodec2Component(std::string codecName);
    
    int32_t encodeFrame(const char *data,int32_t frameSize);

    int32_t encodeFrames(std::ifstream &eleStream, size_t inputBufferSize);

    int32_t getInputMaxBufSize();

    void deInitCodec();

    void resetEncoder();

    int32_t setupCodec2();

    void waitOnInputConsumption();

    // callback function to process onWorkDone received by Listener
    void handleWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);

    bool mEos;

    static long int total;
    
    void setCodec2Enc(GstCodec2enc *codec2enc)
    {
        mCodec2enc = codec2enc;
    }
    void setMime(std::string mime)
    {
        mMime = mime;
    }

    std::string getMime()
    {
        return  mMime;
    }

    void setProfile(uint profile)
    {
        switch(profile)
        {
            case 0:
                mProfile = C2Config::PROFILE_AVC_BASELINE;
            break;
            case 1:
                mProfile = C2Config::PROFILE_AVC_CONSTRAINED_BASELINE;
            break;
            case 2:
                mProfile = C2Config::PROFILE_AVC_MAIN;
            break;
            case 3:
                mProfile = C2Config::PROFILE_AVC_HIGH;
            break;
            case 4:
                mProfile = C2Config::PROFILE_HEVC_MAIN;
            break;
            default:
                mProfile = C2Config::PROFILE_AVC_MAIN;
            break;
        }
    }

    void setLevel(uint level)
    {
        switch(level)
        {
            case 0:
                mLevel = C2Config::LEVEL_AVC_1;
            break;
            case 1:
                mLevel = C2Config::LEVEL_AVC_2;
            break;
            case 2:
                mLevel = C2Config::LEVEL_AVC_3;
            break;
            case 3:
                mLevel = C2Config::LEVEL_AVC_4;
            break;
            case 4:
                mLevel = C2Config::LEVEL_AVC_5;
            break;
            case 5:
                mLevel = C2Config::LEVEL_HEVC_MAIN_1;
            break;
            case 6:
                mLevel = C2Config::LEVEL_HEVC_MAIN_2;
            break;
            default:
                mLevel = C2Config::LEVEL_AVC_4;
            break;
        }
    }

    void setWidth(int32_t width)
    {
        mWidth = width;
    }

    int32_t getWidth()
    {
        return mWidth;
    }

    void setHeight(int32_t height)
    {
        mHeight = height;
    }

    int32_t getHeight()
    {
        return mHeight;
    }

    void setFrameRate(int32_t frame_rate)
    {
        mFrameRate = frame_rate;
    }

    int32_t getFrameRate()
    {
        return mFrameRate;
    }

    void setSampleRate(int32_t sample_rate)
    {
        mSampleRate = sample_rate;
    }

    int32_t getSampleRate()
    {
        return mSampleRate;
    }

    void setColorFormat(int32_t color_format)
    {
        mColorFormat = color_format;
    }

    int32_t getColorFormat()
    {
        return mColorFormat;
    }

    void setBitRate(int32_t bit_rate)
    {
        mBitRate = bit_rate;
    }

    int32_t getBitRate()
    {
        return mBitRate;
    }

    void setChannelNum(int32_t channel_num)
    {
        mChannelNum = channel_num;
    }

    int32_t getChannelNum()
    {
        return mChannelNum;
    }


    void setProcessDataCb(DataCb cb)
    {
        onProcessDataCb = cb;
    }

    void flush();

  private:
    bool mIsAudioEncoder;
    std::string  mMime;
    C2Config::profile_t mProfile;
    C2Config::level_t mLevel;
    int32_t mWidth;
    int32_t mHeight;
    int32_t mFrameRate;
    int32_t mBitRate;
    int32_t mSampleRate;
    int32_t mColorFormat;
    int32_t mNumInputFrame;
    int32_t mInputMaxBufSize;
    int32_t mChannelNum;

    std::shared_ptr<android::Codec2Client> mClient;

    C2BlockPool::local_id_t mBlockPoolId;
    std::shared_ptr<C2BlockPool> mLinearPool;
    std::shared_ptr<C2BlockPool> mGraphicPool;
    std::shared_ptr<C2Allocator> mLinearAllocator;
    std::shared_ptr<C2Allocator> mGraphicAllocator;

    std::mutex mQueueLock;
    std::condition_variable mQueueCondition;
    std::list<std::unique_ptr<C2Work>> mWorkQueue;
    //std::ofstream fout;
    std::shared_ptr<android::Codec2Client::Listener> mListener;
    std::shared_ptr<android::Codec2Client::Component> mComponent;
    DataCb onProcessDataCb;
    GstCodec2enc *mCodec2enc;
};

#endif  // __C2_ENCODER_H__
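
// ---- C2Encoder.cpp (implementation) ----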
#define LOG_TAG "C2Encoder"

#include "C2Encoder.h" 

long int C2Encoder::total = 0;

class Codec2ClientInterfaceWrapper : public C2ComponentStore {
    std::shared_ptr<android::Codec2Client> mClient;

public:
    Codec2ClientInterfaceWrapper(std::shared_ptr<android::Codec2Client> client)
        : mClient(client) { }

    virtual ~Codec2ClientInterfaceWrapper() = default;

    virtual c2_status_t config_sm(
            const std::vector<C2Param *> &params,
            std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
        return mClient->config(params, C2_MAY_BLOCK, failures);
    };

    virtual c2_status_t copyBuffer(
            std::shared_ptr<C2GraphicBuffer>,
            std::shared_ptr<C2GraphicBuffer>) {
        return C2_OMITTED;
    }

    virtual c2_status_t createComponent(
            C2String, std::shared_ptr<C2Component> *const component) {
        component->reset();
        return C2_OMITTED;
    }

    virtual c2_status_t createInterface(
            C2String, std::shared_ptr<C2ComponentInterface> *const interface) {
        interface->reset();
        return C2_OMITTED;
    }

    virtual c2_status_t query_sm(
            const std::vector<C2Param *> &stackParams,
            const std::vector<C2Param::Index> &heapParamIndices,
            std::vector<std::unique_ptr<C2Param>> *const heapParams) const {
        return mClient->query(stackParams, heapParamIndices, C2_MAY_BLOCK, heapParams);
    }

    virtual c2_status_t querySupportedParams_nb(
            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const {
        return mClient->querySupportedParams(params);
    }

    virtual c2_status_t querySupportedValues_sm(
            std::vector<C2FieldSupportedValuesQuery> &fields) const {
        return mClient->querySupportedValues(fields, C2_MAY_BLOCK);
    }

    virtual C2String getName() const {
        return mClient->getName();
    }

    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const {
        return mClient->getParamReflector();
    }

    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() {
        return std::vector<std::shared_ptr<const C2Component::Traits>>();
    }
};

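// Create the Codec2 client, register it as the preferred component store,
// fetch the linear/graphic allocators and block pools, and pre-allocate the
// C2Work queue used for input.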
int32_t C2Encoder::setupCodec2() {
    GST_DEBUG("In %s", __func__);
    mClient = android::Codec2Client::CreateFromService("default");
    if (!mClient) {
        mClient = android::Codec2Client::CreateFromService("software");
    }

    if (!mClient) 
    {
        GST_DEBUG("CreateFromService failed\n");
        return -1;
    }

    android::SetPreferredCodec2ComponentStore(std::make_shared<Codec2ClientInterfaceWrapper>(mClient));
	
    std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
    if (!store) return -1;

    c2_status_t status = store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator);
    if (status != C2_OK) return status;

    mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
    if (!mLinearPool) return -1;

    status = store->fetchAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &mGraphicAllocator);
    if (status != C2_OK) return status;

    mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
    if (!mGraphicPool) return -1;

    for (int i = 0; i < MAX_INPUT_BUFFERS; ++i) {
        mWorkQueue.emplace_back(new C2Work);
    }
    return status;
}


void C2Encoder::handleWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
    std::shared_ptr<C2Buffer> output;
    for (std::unique_ptr<C2Work> &work : workItems) {
        if (!work->worklets.empty()) {
            if (work->worklets.front()->output.flags != C2FrameData::FLAG_INCOMPLETE) {
                mEos = (work->worklets.front()->output.flags & C2FrameData::FLAG_END_OF_STREAM) !=0;

                size_t csdSize = work->worklets.front()->output.configUpdate.size();
                
                if(csdSize != 0){
                    
                    for (const std::unique_ptr<C2Param> &param : work->worklets.front()->output.configUpdate) {
                        if (param) {
                            // Only treat C2StreamInitDataInfo updates as codec-specific data
                            // (e.g. SPS/PPS for AVC); From() returns nullptr for other params.
                            C2StreamInitDataInfo::output *csdBuffer =
                                    C2StreamInitDataInfo::output::From(param.get());
                            if (csdBuffer && onProcessDataCb != NULL) {
                                // Forward the CSD with timestamp -1.
                                onProcessDataCb(mCodec2enc, csdBuffer->m.value, csdBuffer->flexCount(), -1);
                            }
                        }
                    }

                } 

                size_t size = work->worklets.front()->output.buffers.size();
                if(size == 1u)
                {
                    output = work->worklets.front()->output.buffers[0];
                    const C2ConstLinearBlock linearBlock = output->data().linearBlocks().front();
                    C2ReadView readView = linearBlock.map().get();
                    const uint8_t*constData = readView.data();
                    uint32_t dataSize = readView.capacity();
                    if(onProcessDataCb != NULL)
                    {
                        onProcessDataCb(mCodec2enc,constData,dataSize,(work->worklets.front()->output.ordinal.timestamp*1000ll).peekll());
                    }
                }

                work->input.buffers.clear();
                work->worklets.clear();

                {
                    typedef std::unique_lock<std::mutex> ULock;
                    ULock l(mQueueLock);
                    mWorkQueue.push_back(std::move(work));
                    mQueueCondition.notify_all();
                }
            }
        }
    }
}

int32_t C2Encoder::createCodec2Component(std::string compName) {
    ALOGV("In %s", __func__);

    mListener.reset(new CodecListener(
            [this](std::list<std::unique_ptr<C2Work>> &workItems) { handleWorkDone(workItems); }));
    if (!mListener) return -1;

    if (mMime == "") {
        ALOGE("Error in mime");
        return -1;
    }
    // Configure the component with the input properties.
    // Note: configParam only stores raw pointers, so the C2Param objects are
    // declared at function scope and must stay alive until config() is called below.
    std::vector<C2Param *> configParam;
    C2StreamSampleRateInfo::input sampleRateInfo(0u, mSampleRate);
    C2StreamChannelCountInfo::input channelCountInfo(0u, mChannelNum);
    C2StreamProfileLevelInfo::output profileLevel(0u, mProfile, mLevel);
    C2StreamPictureSizeInfo::input inputSize(0u, mWidth, mHeight);
    C2StreamFrameRateInfo::output outputFrameRate(0u, mFrameRate);
    C2StreamBitrateInfo::output outputBitRate(0u, mBitRate);
    // Input-side variants (C2StreamFrameRateInfo::input, C2StreamPixelFormatInfo::input,
    // C2StreamBitrateInfo::input) could also be added here if the component needs them.
    if (mMime.compare(0, 6, "audio/") == 0) {
        fprintf(stderr, "audio\n");
        mIsAudioEncoder = true;
        configParam.push_back(&sampleRateInfo);
        configParam.push_back(&channelCountInfo);
    } else {
        mIsAudioEncoder = false;
        configParam.push_back(&profileLevel);
        configParam.push_back(&inputSize);
        configParam.push_back(&outputFrameRate);
        configParam.push_back(&outputBitRate);
        GST_DEBUG("mWidth = %d,mHeight = %d,mFrameRate = %d,mColorFormat = %d,mBitRate = %d",mWidth,mHeight,mFrameRate,mColorFormat,mBitRate);
    }

    // On Android 11, CreateComponentByName returned the component directly:
    //
    //   mComponent = mClient->CreateComponentByName(compName.c_str(), mListener, &mClient);
    //   if (mComponent == nullptr) {
    //       GST_DEBUG("Create component failed for %s\n", compName.c_str());
    //       return -1;
    //   }

    // On Android 12, the component is returned through an output parameter:
    if (mClient->CreateComponentByName(compName.c_str(), mListener, &mComponent, &mClient) !=
        C2_OK) {
        GST_DEBUG("Create component failed for %s", compName.c_str());
        return -1;
    }

    // std::vector<C2Component::Traits> traits = android::Codec2Client::ListComponents();
    // for(const C2Component::Traits& trait:traits)
    // {
    //     std::vector<std::string> nameAndAliases = trait.aliases;
    //     nameAndAliases.insert(nameAndAliases.begin(),trait.name);

    // }
    // std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
    // std::vector<std::unique_ptr<C2Param>> heapParams;
    // if(mClient->query({},
    //                   {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
    //                   C2_MAY_BLOCK,
    //                   &heapParams) == C2_OK 
    //                   && heapParams.size() == 1u)
    // {
    //     pixelFormatInfo.reset(C2StoreFlexiblePixelFormatDescriptorsInfo::From(heapParams[0].release()));
    //     for(int i = 0;i < pixelFormatInfo->flexCount();++i)
    //     {
    //         fprintf(stderr,"bit depth = %d,layout = %d\n",pixelFormatInfo->m.values[i].bitDepth,pixelFormatInfo->m.values[i].layout);
    //     }
    // }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    int32_t status = mComponent->config(configParam, C2_DONT_BLOCK, &failures);
    if (failures.size() != 0) {
        fprintf(stderr,"Invalid Configuration\n");
        GST_DEBUG("Invalid Configuration\n");
        return -1;
    }

    status |= mComponent->start();
    return status;
}

void C2Encoder::flush()
{
    std::list<std::unique_ptr<C2Work>> flushedWork;
    c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
    if (err != C2_OK) {
        GST_DEBUG("flush failed => %d", err);
    }
}

// In encoder components, fetch the size of input buffer allocated
int32_t C2Encoder::getInputMaxBufSize() {
    int32_t bitStreamInfo[1] = {0};
    std::vector<std::unique_ptr<C2Param>> inParams;
    c2_status_t status = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
                                           C2_DONT_BLOCK, &inParams);
    if (status != C2_OK && inParams.size() == 0) {
        ALOGE("Query MaxBufferSizeInfo failed => %d", status);
        return status;
    } else {
        size_t offset = sizeof(C2Param);
        for (size_t i = 0; i < inParams.size(); ++i) {
            C2Param *param = inParams[i].get();
            bitStreamInfo[i] = *(int32_t *)((uint8_t *)param + offset);
        }
    }
    mInputMaxBufSize = bitStreamInfo[0];
    if (mInputMaxBufSize < 0) {
        ALOGE("Invalid mInputMaxBufSize %d\n", mInputMaxBufSize);
        return -1;
    }
    return status;
}

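// Wrap one raw frame (PCM for audio, I420 for video) in a C2Work: copy the data
// into a block fetched from the matching pool, then queue the work to the component.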
int32_t C2Encoder::encodeFrame(const char *data,int32_t frameSize){
    typedef std::unique_lock<std::mutex> ULock;
    uint64_t presentationTimeUs = 0;
    size_t offset = 0;
    c2_status_t status = C2_OK;
    uint32_t flags = 0;

    {
        std::unique_ptr<C2Work> work;
        // Prepare C2Work
        {
            ULock l(mQueueLock);
            if (mWorkQueue.empty()) 
                mQueueCondition.wait_for(l, MAX_RETRY * TIME_OUT);

            if (!mWorkQueue.empty()) {
                work.swap(mWorkQueue.front());
                mWorkQueue.pop_front();
            } else {
                cout << "Wait for generating C2Work exceeded timeout" << endl;
                return -1;
            }
        }

        if (mIsAudioEncoder) {
            presentationTimeUs = mNumInputFrame * frameSize * (1000000 / mSampleRate);
        } else {
            presentationTimeUs = mNumInputFrame * (1000000 / mFrameRate);
        }

        work->input.flags = (C2FrameData::flags_t)flags;
        work->input.ordinal.timestamp = presentationTimeUs;
        work->input.ordinal.frameIndex = mNumInputFrame;
        work->input.buffers.clear();
        if (mIsAudioEncoder) {
            std::shared_ptr<C2LinearBlock> block;
            status = mLinearPool->fetchLinearBlock(frameSize, {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
            if (status != C2_OK || !block) {
                cout << "fetchLinearBlock failed : " << status << endl;
                return status;
            }
            C2WriteView view = block->map().get();
            if (view.error() != C2_OK) {
                cout << "C2LinearBlock::map() failed : " << view.error() << endl;
                return view.error();
            }

            memcpy(view.base(), data, frameSize);
            work->input.buffers.emplace_back(new LinearBuffer(block));
        } else {
            std::shared_ptr<C2GraphicBlock> block;
            status = mGraphicPool->fetchGraphicBlock(
                   mWidth,
                   mHeight, 
                   HAL_PIXEL_FORMAT_YV12,//HAL_PIXEL_FORMAT_YCBCR_420_888,
                    {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, 
                    &block);
            if (status != C2_OK || !block) {
                cout << "fetchGraphicBlock failed : " << status << endl;
                return status;
            }
            C2GraphicView view = block->map().get();
            if (view.error() != C2_OK) {
                cout << "C2GraphicBlock::map() failed : " << view.error() << endl;
                return view.error();
            }
            
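            // The input frame is assumed to be planar I420 (Y, then U, then V).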
            uint8_t *pY = view.data()[C2PlanarLayout::PLANE_Y];
            uint8_t *pU = view.data()[C2PlanarLayout::PLANE_U];
            uint8_t *pV = view.data()[C2PlanarLayout::PLANE_V];
            memcpy(pY, data, mWidth * mHeight);
            memcpy(pU, data + mWidth * mHeight, (mWidth * mHeight >> 2));
            memcpy(pV, data + (mWidth * mHeight * 5 >> 2), mWidth * mHeight >> 2);
            work->input.buffers.emplace_back(new GraphicBuffer(block));
        }

        work->worklets.clear();
        work->worklets.emplace_back(new C2Worklet);

        std::list<std::unique_ptr<C2Work>> items;
        items.push_back(std::move(work));
        status = mComponent->queue(&items);
        if (status != C2_OK) {
            ALOGE("queue failed");
            return status;
        }
        mNumInputFrame++;
    }
    return status;
}


void C2Encoder::deInitCodec() {
    ALOGV("In %s", __func__);
    if (!mComponent) return;
    mComponent->stop();
    mComponent->release();
    mComponent = nullptr;

}

void C2Encoder::resetEncoder() {
    mIsAudioEncoder = false;
    mNumInputFrame = 0;
    mEos = false;
}

Simplified flow (a minimal usage sketch follows the list):

  1. Implement a subclass of C2ComponentStore.
  2. Call CreateFromService to obtain a Codec2Client object.
  3. Call SetPreferredCodec2ComponentStore to register the store wrapping that Codec2Client with the system.
  4. Call GetCodec2PlatformAllocatorStore to obtain the allocator store (a C2AllocatorStore object).
  5. Call fetchAllocator on the store to obtain the buffer allocators, and create block pools from them.
  6. Implement a handleWorkDone callback and register it through a Codec2Client::Listener.
  7. Build the list of C2Param configuration parameters.
  8. Call CreateComponentByName on the Codec2Client object to create the component.
  9. Call the component's config function to apply the parameters.
  10. Call the component's start function to start it.
  11. In the encode function, fill in the fields of a C2Work object.
  12. Call fetchGraphicBlock on the graphic block pool to obtain a block, and map it to a C2GraphicView object (view).
  13. Copy the YUV data into the view's plane buffers and attach the block to the C2Work object.
  14. Call the component's queue function to submit the C2Work to the encoder for processing.
  15. The handleWorkDone callback receives the list of processed C2Work objects, from which the encoded data can be retrieved.
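
Putting these steps together, here is a minimal, hypothetical usage sketch (not part of the original code) that drives the C2Encoder class above to encode raw I420 frames. The component name "c2.android.avc.encoder", the 1280x720 resolution, the bitrate, and the file handling are illustrative assumptions; substitute values that match your device and input.

// Hypothetical caller: encode an I420 YUV file with the AOSP software AVC encoder.
static void onEncoded(GstCodec2enc *codec2enc, const uint8_t *data, int size, long int timestamp)
{
    (void)codec2enc;
    (void)timestamp;                 // timestamp is -1 for codec-specific data (SPS/PPS)
    fwrite(data, 1, size, stdout);   // write the raw H.264 bitstream
}

int runC2Encoder(const char *yuvPath)
{
    C2Encoder encoder;
    encoder.setMime("video/avc");
    encoder.setWidth(1280);
    encoder.setHeight(720);
    encoder.setFrameRate(25);
    encoder.setBitRate(2000000);
    encoder.setProfile(3);           // PROFILE_AVC_HIGH, see setProfile()
    encoder.setLevel(3);             // LEVEL_AVC_4, see setLevel()
    encoder.setProcessDataCb(onEncoded);

    if (encoder.setupCodec2() != 0) return -1;
    if (encoder.createCodec2Component("c2.android.avc.encoder") != 0) return -1;

    std::ifstream in(yuvPath, std::ios::binary);
    const int frameSize = 1280 * 720 * 3 / 2;   // one I420 frame
    std::vector<char> frame(frameSize);
    while (in.read(frame.data(), frameSize)) {
        if (encoder.encodeFrame(frame.data(), frameSize) != 0) break;
    }
    encoder.flush();
    encoder.deInitCodec();
    return 0;
}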