RK3568: Wrapping V4L2 into a C++ Camera Management Class (Based on the Rockchip Build Environment)

This article shows how to wrap V4L2 into a C++ CameraReader class on Linux for the RK3568, covering device initialization, frame capture and processing, and thread management. It involves the ioctl() call, memory mapping (mmap), and V4L2 driver operations.

The code below targets the Rockchip (MPP) build environment. If you are on a plain Linux environment, you can read the code and make the corresponding changes yourself; the files that mainly need modification are CameraReader.cpp, CameraReader.h, and thread.h.

CameraReader.cpp

/*
 * Copyright 2015 Rockchip Electronics Co. LTD
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define MODULE_TAG "CameraReader"



#include "mpp_log.h"
#include "mpp_mem.h"
#include "CameraReader.h"

CameraReader::CameraReader(const char *device, RK_U32 bufcnt, RK_U32 width, RK_U32 height, MppFrameFormat fmt)
    : threadBase(device),
      mBufcnt(bufcnt),
      mWidth(width),
      mHeight(height),
      mFmt(fmt)
{
    // mDevice is a fixed-size array, so copy with an explicit bound
    snprintf(mDevice, sizeof(mDevice), "%s", device);
}

// Wrap ioctl() to spin on EINTR
RK_S32 CameraReader::Ioctl(RK_S32 fd, RK_S32 req, void* arg)
{
    struct timespec poll_time;
    RK_S32 ret;

    while ((ret = ioctl(fd, req, arg))) {
        if (ret == -1 && (EINTR != errno && EAGAIN != errno)) {
            // mpp_err("ret = %d, errno %d", ret, errno);
            break;
        }
        // 10 milliseconds
        poll_time.tv_sec = 0;
        poll_time.tv_nsec = 10000000;
        nanosleep(&poll_time, NULL);
    }

    return ret;
}

// Initialize a capture context for <device>.
// Returns MPP_OK on success, MPP_NOK on error.
RK_S32 CameraReader::Init(const char *device, RK_U32 bufcnt, RK_U32 width, RK_U32 height, MppFrameFormat format)
{
    struct v4l2_capability     cap;
    struct v4l2_format         vfmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer         buf;
    enum   v4l2_buf_type       type;
    RK_U32 i;
    RK_U32 buf_len = 0;
   
    mCtx = mpp_calloc(CamSource, 1);
    if (!mCtx)
        return -1;

    mCtx->bufcnt = bufcnt;
    mCtx->fd = open(device, O_RDWR, 0);
    if (mCtx->fd < 0) {
        mpp_err_f("Cannot open device\n");
        goto FAIL;
    }

    // Determine if fd is a V4L2 Device
    if (0 != Ioctl(mCtx->fd, VIDIOC_QUERYCAP, &cap)) {
        mpp_err_f("Not v4l2 compatible\n");
        goto FAIL;
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) && !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) {
        mpp_err_f("Capture not supported\n");
        goto FAIL;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        mpp_err_f("Streaming IO Not Supported\n");
        goto FAIL;
    }

    // Configure the capture format; the driver may adjust it, so it is read back with VIDIOC_G_FMT below
    vfmt = (struct v4l2_format) {0};
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
        vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    vfmt.fmt.pix.width = width;
    vfmt.fmt.pix.height = height;

    if (MPP_FRAME_FMT_IS_YUV(format)) {
        vfmt.fmt.pix.pixelformat = V4L2_yuv_cfg[format - MPP_FRAME_FMT_YUV];
    } else if (MPP_FRAME_FMT_IS_RGB(format)) {
        vfmt.fmt.pix.pixelformat = V4L2_RGB_cfg[format - MPP_FRAME_FMT_RGB];
    }

    if (!vfmt.fmt.pix.pixelformat)
        vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;

    type = (v4l2_buf_type) vfmt.type;
    mCtx->type = (v4l2_buf_type) vfmt.type;

    if (-1 == Ioctl(mCtx->fd, VIDIOC_S_FMT, &vfmt)) {
        mpp_err_f("VIDIOC_S_FMT\n");
        goto FAIL;
    }

    if (-1 == Ioctl(mCtx->fd, VIDIOC_G_FMT, &vfmt)) {
        mpp_err_f("VIDIOC_G_FMT\n");
        goto FAIL;
    }

    mpp_log("get width %d height %d", vfmt.fmt.pix.width, vfmt.fmt.pix.height);

    // Request memory-mapped buffers
    req = (struct v4l2_requestbuffers) {0};
    req.count  = mCtx->bufcnt;
    req.type   = type;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == Ioctl(mCtx->fd, VIDIOC_REQBUFS, &req)) {
        mpp_err_f("Device does not support mmap\n");
        goto FAIL;
    }

    if (req.count != mCtx->bufcnt) {
        mpp_err_f("Device buffer count mismatch\n");
        goto FAIL;
    }

    // mmap() the buffers into userspace memory
    for (i = 0 ; i < mCtx->bufcnt; i++) {
        buf = (struct v4l2_buffer) {0};
        buf.type    = type;
        buf.memory  = V4L2_MEMORY_MMAP;
        buf.index   = i;
        struct v4l2_plane planes[FMT_NUM_PLANES];
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == Ioctl(mCtx->fd, VIDIOC_QUERYBUF, &buf)) {
            mpp_err_f("ERROR: VIDIOC_QUERYBUF\n");
            goto FAIL;
        }

        // mmap() each buffer exactly once, using the plane offset for MPLANE devices
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf.type) {
            // tmp_buffers[n_buffers].length = buf.m.planes[0].length;
            buf_len = buf.m.planes[0].length;
            mCtx->fbuf[i].start =
                mmap(NULL /* start anywhere */,
                     buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     mCtx->fd, buf.m.planes[0].m.mem_offset);
        } else {
            buf_len = buf.length;
            mCtx->fbuf[i].start =
                mmap(NULL /* start anywhere */,
                     buf.length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     mCtx->fd, buf.m.offset);
        }
        if (MAP_FAILED == mCtx->fbuf[i].start) {
            mpp_err_f("ERROR: Failed to map device frame buffers\n");
            goto FAIL;
        }
        mCtx->fbuf[i].length = buf_len; // remembered for munmap() in Deinit()
        struct v4l2_exportbuffer expbuf = (struct v4l2_exportbuffer) {0};
        expbuf.type = type;
        expbuf.index = i;
        expbuf.flags = O_CLOEXEC;
        if (Ioctl(mCtx->fd, VIDIOC_EXPBUF, &expbuf) < 0) {
            mpp_err_f("get dma buf failed\n");
            goto FAIL;
        } else {
            mpp_log("get dma buf(%d)-fd: %d\n", i, expbuf.fd);
            MppBufferInfo info;
            memset(&info, 0, sizeof(MppBufferInfo));
            info.type = MPP_BUFFER_TYPE_EXT_DMA;
            info.fd =  expbuf.fd;
            info.size = buf_len & 0x07ffffff;
            info.index = (buf_len & 0xf8000000) >> 27;
            mpp_buffer_import(&mCtx->fbuf[i].buffer, &info);
        }
        mCtx->fbuf[i].export_fd = expbuf.fd;
    }

    for (i = 0; i < mCtx->bufcnt; i++ ) {
        struct v4l2_plane planes[FMT_NUM_PLANES];

        buf = (struct v4l2_buffer) {0};
        buf.type    = type;
        buf.memory  = V4L2_MEMORY_MMAP;
        buf.index   = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == Ioctl(mCtx->fd, VIDIOC_QBUF, &buf)) {
            mpp_err_f("ERROR: VIDIOC_QBUF %d\n", i);
            Deinit(mCtx);
            goto FAIL;
        }
    }

    // Start capturing
    if (-1 == Ioctl(mCtx->fd, VIDIOC_STREAMON, &type)) {
        mpp_err_f("ERROR: VIDIOC_STREAMON\n");
        Deinit(mCtx);
        goto FAIL;
    }

    //skip some frames at start
    for (i = 0; i < mCtx->bufcnt; i++ ) {
        RK_S32 idx = GetFrame(mCtx);
        if (idx >= 0)
            PutFrame(mCtx, idx);
    }
    code = new RkH264Encode();
    code->start();
    return MPP_OK;

FAIL:
    Deinit(mCtx);
    mCtx = NULL;
    return MPP_NOK;
}

// Stop capturing, release the imported buffers and close the device.
// Always returns MPP_OK.
MPP_RET CameraReader::Deinit(CamSource *ctx)
{
    enum v4l2_buf_type type;
    RK_U32 i;

    if (NULL == ctx)
        return MPP_OK;

    if (ctx->fd < 0)
        return MPP_OK;

    // Stop capturing
    type = ctx->type;
    Ioctl(ctx->fd, VIDIOC_STREAMOFF, &type);

    // Release the imported MppBuffers and un-mmap() the frame buffers
    for (i = 0 ; i < ctx->bufcnt; i++) {
        if (ctx->fbuf[i].buffer)
            mpp_buffer_put(ctx->fbuf[i].buffer);
        if (ctx->fbuf[i].start && ctx->fbuf[i].length)
            munmap(ctx->fbuf[i].start, ctx->fbuf[i].length);
    }

    // Close the v4l2 device
    close(ctx->fd);
    MPP_FREE(ctx);
    return MPP_OK;
}

// Dequeue the next captured frame and return its buffer index (MPP_NOK on error). NOT thread-safe.
RK_S32 CameraReader::GetFrame(CamSource *ctx)
{
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;

    type = ctx->type;
    buf = (struct v4l2_buffer) {0};
    buf.type   = type;
    buf.memory = V4L2_MEMORY_MMAP;

    struct v4l2_plane planes[FMT_NUM_PLANES];
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    if (-1 == Ioctl(ctx->fd, VIDIOC_DQBUF, &buf)) {
        mpp_err_f("VIDIOC_DQBUF\n");
        return MPP_NOK;
    }

    if (buf.index >= ctx->bufcnt) {
        mpp_err_f("buffer index out of bounds\n");
        return MPP_NOK;
    }

    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type)
        buf.bytesused = buf.m.planes[0].bytesused;
  
    return buf.index;
}

// It's OK to capture into this framebuffer now
MPP_RET CameraReader::PutFrame(CamSource *ctx, RK_S32 idx)
{
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;

    if (idx < 0)
        return MPP_OK;

    type = ctx->type;
    buf = (struct v4l2_buffer) {0};
    buf.type   = type;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = idx;

    struct v4l2_plane planes[FMT_NUM_PLANES];
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    // Tell the kernel it's OK to overwrite this frame
    if (-1 == Ioctl(ctx->fd, VIDIOC_QBUF, &buf)) {
        mpp_err_f("VIDIOC_QBUF\n");
        return MPP_NOK;
    }

    return MPP_OK;
}

MppBuffer CameraReader::Frame2Buf(CamSource *ctx, RK_S32 idx)
{
    if (idx < 0)
        return NULL;

    return ctx->fbuf[idx].buffer;
}
RK_S32 CameraReader::Open()
{
    return Init(mDevice, mBufcnt, mWidth, mHeight, mFmt);
}

// Called once by the thread framework before the loop starts
bool CameraReader::readyToRun()
{
    return Open() == MPP_OK;
}

void CameraReader::start()
{
    run(); // launch the capture thread
}
// Thread body: called repeatedly while the thread is running
bool CameraReader::threadLoop()
{
    RK_S32 index = GetFrame(mCtx);

    if (index < 0) {
        // No frame available yet, back off briefly
        usleep(2000);
        return true;
    }

    MppBuffer frame = Frame2Buf(mCtx, index); // the captured frame data is available here
    (void)frame; // e.g. hand it to the encoder:
    /*
    code->write();
    while (code->dealImageCompletes()) {
        usleep(10);
    }
    */
    PutFrame(mCtx, index);
    return true;
}
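
The commented-out encoder calls are where the frame would normally be consumed. If you first want to verify that capture itself works, the helper below is one way to do it. This is my own sketch, not part of the original class; it assumes the standard MPP buffer helpers mpp_buffer_get_ptr() and mpp_buffer_get_size(), and the output path is hypothetical. Call it between GetFrame() and PutFrame(), e.g. DumpFrameToFile(Frame2Buf(mCtx, index), "/tmp/cam_dump.yuv"), then check the result with a raw YUV viewer.

// Sketch only: dump one captured frame to a raw file for inspection.
// Assumes MPP's mpp_buffer_get_ptr()/mpp_buffer_get_size(); the file path is hypothetical.
static void DumpFrameToFile(MppBuffer buffer, const char *path)
{
    if (!buffer)
        return;

    void  *data = mpp_buffer_get_ptr(buffer);   // CPU-visible address of the imported DMA buffer
    size_t size = mpp_buffer_get_size(buffer);  // size recorded when the buffer was imported in Init()

    FILE *fp = fopen(path, "ab");               // append each frame to one raw stream
    if (fp) {
        fwrite(data, 1, size, fp);
        fclose(fp);
    }
}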

CameraReader.h

/*
 * Copyright 2015 Rockchip Electronics Co. LTD
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CAMERA_READER_H__
#define __CAMERA_READER_H__
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include <sys/select.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

#include "mpp_frame.h"
#include "thread.h"
#include "RkH264Encode.h"

typedef struct CamSource CamSource;
typedef struct CamFrame_t {
    void        *start;     // start of the mapped image data
    size_t      length;
    RK_S32      export_fd;
    RK_S32      sequence;
    MppBuffer   buffer;
} CamFrame;

struct CamSource {
    RK_S32              fd;     // Device handle
    RK_U32              bufcnt; // # of buffers
    enum v4l2_buf_type  type;
    MppFrameFormat      fmt;
    CamFrame            fbuf[10];// frame buffers
};

static RK_U32 V4L2_yuv_cfg[MPP_FMT_YUV_BUTT] = {
    V4L2_PIX_FMT_NV12,
    0,
    V4L2_PIX_FMT_NV16,
    0,
    V4L2_PIX_FMT_YVU420,
    V4L2_PIX_FMT_NV21,
    V4L2_PIX_FMT_YUV422P,
    V4L2_PIX_FMT_NV61,
    V4L2_PIX_FMT_YUYV,
    V4L2_PIX_FMT_YVYU,
    V4L2_PIX_FMT_UYVY,
    V4L2_PIX_FMT_VYUY,
    V4L2_PIX_FMT_GREY,
    0,
    0,
    0,
};

static RK_U32 V4L2_RGB_cfg[MPP_FMT_RGB_BUTT - MPP_FRAME_FMT_RGB] = {
    V4L2_PIX_FMT_RGB565,
    0,
    V4L2_PIX_FMT_RGB555,
    0,
    V4L2_PIX_FMT_RGB444,
    0,
    V4L2_PIX_FMT_RGB24,
    V4L2_PIX_FMT_BGR24,
    0,
    0,
    V4L2_PIX_FMT_RGB32,
    V4L2_PIX_FMT_BGR32,
    0,
    0,
};

#define FMT_NUM_PLANES 1

class CameraReader : public threadBase{
private:
    
    CamSource *mCtx = NULL;
    char mDevice[20];
    RK_U32 mBufcnt;
    RK_U32 mWidth;
    RK_U32 mHeight; 
    MppFrameFormat mFmt;
    RkH264Encode *code;

    // Initializes the capture context for <device>. Returns MPP_OK on success, MPP_NOK on error.
    RK_S32 Init(const char *device, RK_U32 bufcnt, RK_U32 width, RK_U32 height, MppFrameFormat fmt);
    RK_S32 Open();
    // Stop capturing and free a context.
    MPP_RET Deinit(CamSource *ctx);
    
    RK_S32 Ioctl(RK_S32 fd, RK_S32 req, void* arg);
    // Dequeues the next captured frame and returns its buffer index (MPP_NOK on error).
    RK_S32 GetFrame(CamSource *ctx);

    // Re-queues a captured frame so the kernel may overwrite it
    MPP_RET PutFrame(CamSource *ctx, RK_S32 idx);

    MppBuffer Frame2Buf(CamSource *ctx, RK_S32 idx);

    bool readyToRun();
    bool threadLoop();
public:
    CameraReader(const char *device, RK_U32 bufcnt, RK_U32 width, RK_U32 height, MppFrameFormat fmt);
    void start();
};
#endif /* __CAMERA_READER_H__ */

main_test

#include <string.h>
#include "rk_mpi.h"

#include "mpp_env.h"
#include "mpp_mem.h"
#include "mpp_time.h"
#include "mpp_debug.h"
#include "mpp_common.h"
#include "CameraReader.h"
int main()
{
    CameraReader *camera1 = new CameraReader("/dev/video0", 10, 1920, 1080, MPP_FMT_YUV420SP);
    camera1->start(); // start the capture thread
    while (1) {
        usleep(33333);
    }
    return 0;
}
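
The test above spins forever, so Ctrl-C never runs any cleanup. A slightly friendlier variant is sketched below; this is my addition rather than the original test, and it assumes nothing beyond POSIX signal(). It simply breaks out of the loop on SIGINT.

#include <signal.h>
#include "CameraReader.h"

static volatile sig_atomic_t g_run = 1;
static void on_sigint(int) { g_run = 0; }   // flip the flag so main() can leave the loop

int main()
{
    signal(SIGINT, on_sigint);
    CameraReader *camera1 = new CameraReader("/dev/video0", 10, 1920, 1080, MPP_FMT_YUV420SP);
    camera1->start();            // start the capture thread
    while (g_run) {
        usleep(33333);           // ~30 fps pacing for the main loop
    }
    return 0;
}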

mpp/mpp-develop/test/CMakeLists.txt

....

# new dec multi unit test
add_mpp_test(mpi_dec_multi c)

# added: camera reader test
add_mpp_test(main cpp) 

...


mpp/mpp-develop/util/CMakeLists.txt

# vim: syntax=cmake
# ----------------------------------------------------------------------------
# add libvpu implement
# ----------------------------------------------------------------------------
include_directories(${PROJECT_SOURCE_DIR}/mpp/base/inc)

add_library(utils STATIC
    mpp_enc_roi_utils.c
    mpi_enc_utils.c
    mpi_dec_utils.c
    mpp_opt.c
    utils.c
    iniparser.c
    dictionary.c
    camera_source.c
    CameraReader.cpp
    thread.cpp
    thread.h
    )

target_link_libraries(utils mpp_base)

This class inherits from threadBase, which is covered in my previous article: “linux下 C++ 封装一个简洁管理方便线程类” (hmbbPdx_'s blog on CSDN).
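
thread.h itself is not reproduced here. For readers who do not have that earlier post at hand, the sketch below shows the rough shape of the threadBase interface that CameraReader relies on: readyToRun() is called once before the loop, threadLoop() is called repeatedly while it returns true, and run() launches the worker. This is my reconstruction from how the class is used above, not the actual code from that article, so treat the names and details as assumptions.

// Assumed shape of threadBase, reconstructed from how CameraReader uses it.
// See the earlier article for the real implementation.
#include <pthread.h>

class threadBase {
public:
    explicit threadBase(const char *name) : mName(name), mRunning(false) {}
    virtual ~threadBase() {}

    // Spawn the worker: readyToRun() once, then threadLoop() until it returns false
    void run() {
        mRunning = true;
        pthread_create(&mTid, NULL, &threadBase::entry, this);
    }

protected:
    virtual bool readyToRun() = 0;   // one-time setup, e.g. opening the camera
    virtual bool threadLoop() = 0;   // one loop iteration; return false to stop

private:
    static void *entry(void *self) {
        threadBase *t = static_cast<threadBase *>(self);
        if (t->readyToRun()) {
            while (t->mRunning && t->threadLoop()) { /* keep pumping frames */ }
        }
        return NULL;
    }

    const char   *mName;      // thread name (the device path in CameraReader)
    volatile bool mRunning;
    pthread_t     mTid;
};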
