2021-04-21 Allwinner T7 Development Notes

Contents

Project scenario: Allwinner T7 dev board + in-house carrier board bring-up

Problem: data transfer before hardware encoding (implemented with the Allwinner-supplied memory interface files)

Problem: H264/H265 hardware encoding parameter configuration

Allwinner T7 V4L2 CVBS video capture


Reference document: https://www.go2aaron.com/www.go2aaron.com/blog/index.php/archives/65/


Project scenario: Allwinner T7 dev board + in-house carrier board bring-up

Building a video encoding/decoding development board with LVDS display output around the Allwinner T7 SoC.

Problem: data transfer before hardware encoding (implemented with the Allwinner-supplied memory interface files)

The file sunxiMemInterface describes two ways of mapping memory addresses.

Why allocate memory through this interface? Because almost every address the Allwinner hardware works with is a physical address, and the interface in this file maps between physical and virtual addresses.

In the paramStruct structure, the members phy and vir hold the physical address and the virtual address respectively.

Implementation approach:
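A minimal allocation sketch, assuming allocOpen/allocAlloc/allocFree/allocClose helpers, a MEM_TYPE_CDX_NEW flag, a paramStruct_t typedef and phy/vir/size members, all modeled on typical sunxiMemInterface code; verify every name against sunxiMemInterface.h in your SDK:

/* Hedged sketch: names are assumptions modeled on sunxiMemInterface.h;
 * check the actual header for the real types and signatures. */
paramStruct_t mem_ops;
memset(&mem_ops, 0, sizeof(mem_ops));
mem_ops.size = width * height * 3 / 2;                /* one NV21 frame; width/height assumed defined */

if (allocOpen(MEM_TYPE_CDX_NEW, &mem_ops, NULL) < 0)  /* assumed open call */
    return -1;
if (allocAlloc(MEM_TYPE_CDX_NEW, &mem_ops, NULL) < 0) /* assumed allocation call */
    return -1;

/* mem_ops.vir: virtual address the CPU uses (memcpy, etc.)   */
/* mem_ops.phy: physical address handed to the VE/CSI hardware */

/* ... use the buffer ... */

allocFree(MEM_TYPE_CDX_NEW, &mem_ops, NULL);          /* assumed free call */
allocClose(MEM_TYPE_CDX_NEW, &mem_ops, NULL);         /* assumed close call */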


Problem: H264/H265 hardware encoding parameter configuration

Allwinner hardware encoding parameter issue 1: the H264 encoding parameters are defined in the header file vencoder.h.

With my configuration of these parameters, the saved encoded stream still would not play: ffplay reported that it could not find key information such as the SPS and PPS. The SPS/PPS information therefore has to be retrieved from the encoder.

Prepending the retrieved SPS/PPS information to each frame of data before saving makes the stream encoded on the Allwinner chip playable.
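For H264 the header can be queried in much the same way. A minimal sketch, assuming vencoder.h exposes the VENC_IndexParamH264SPSPPS index, that VencHeaderData carries pBuffer/nLength, and that out_fp is a hypothetical output FILE* (the H265 variant is shown in its own section below):

/* Sketch: query the H264 SPS/PPS from the encoder and write it once at the
 * start of the output file. Index and member names assumed from vencoder.h;
 * verify against your SDK. */
VencHeaderData sps_pps_data;
memset(&sps_pps_data, 0, sizeof(sps_pps_data));
if (VideoEncGetParameter(pvideo_encoder->pvideoencoder,
                         VENC_IndexParamH264SPSPPS, &sps_pps_data) == 0) {
    fwrite(sps_pps_data.pBuffer, 1, sps_pps_data.nLength, out_fp); /* out_fp: hypothetical output file */
}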

Encoder code download: https://download.csdn.net/download/qq_38542509/16854081

Allwinner H265 hardware encoding parameter configuration

/* Allwinner hardware H265 parameter configuration */
case VENC_CODEC_H265:
    {
        VencH265Param param;
        memset(&param, 0, sizeof(param));            /* avoid passing uninitialized fields */
        param.idr_period = 1;
        param.sProfileLevel.nProfile = VENC_H265ProfileMain10;
        param.sProfileLevel.nLevel = VENC_H265Level31;
        param.bLongTermRef = 0;                      /* value missing in the original; 0 = disabled */
        param.sQPRange.nMaxqp = 40;
        param.sQPRange.nMinqp = 10;
        param.nFramerate = 60;
        param.nBitrate = 8*1024*1024;
        param.nIntraPeriod = 10;
        param.nGopSize = 10;                         /* value missing in the original; assumed equal to nIntraPeriod */
        param.nQPInit = 10;
#if 1
        param.sRcParam.eRcMode = AW_VBR;
        param.sRcParam.uStatTime = 0;
        param.sRcParam.uInputFrmRate = 30;
        param.sRcParam.uOutputFrmRate = 60;
        param.sRcParam.uMaxBitRate = 8*1024*1024;
        param.sRcParam.uMinIprop = 10;
        param.sRcParam.uMaxIprop = 40;
        param.sRcParam.nMaxReEncodeTimes = 10;
        param.sRcParam.bQpMapEn = 1;
        param.sRcParam.nMinStaticPercent = 10;
        param.sRcParam.uMaxStaticIQp = 20;
        param.sRcParam.uMinIQp = 10;
        param.sRcParam.uMaxIQp = 40;
#endif
#if 0
        param.sGopParam.bUseGopCtrlEn = 1;
        param.sGopParam.eGopMode = AW_NORMALP;
        param.sGopParam.nVirtualIFrameInterval = 120;
        param.sGopParam.nSpInterval = 120;
        param.sGopParam.sRefParam.bAdvancedRefEn = 0;
        param.sGopParam.sRefParam.nBase = 0;
        param.sGopParam.sRefParam.nEnhance = 0;
        param.sGopParam.sRefParam.bRefBaseEn = 0;
#endif
        VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Param, &param);

        VencH265TranS h265trans;
        memset(&h265trans, 0, sizeof(h265trans));
        h265trans.transform_skip_enabled_flag = 0;
        VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Trans, &h265trans);

        VencH265SaoS h265saos;
        memset(&h265saos, 0, sizeof(h265saos));
        h265saos.slice_sao_chroma_flag = 0;
        h265saos.slice_sao_luma_flag = 0;
        VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Sao, &h265saos);

        VencH265DblkS h265blks;
        memset(&h265blks, 0, sizeof(h265blks));
        h265blks.slice_deblocking_filter_disabled_flag = 0;
        h265blks.slice_beta_offset_div2 = 0;
        h265blks.slice_tc_offset_div2 = 0;
        VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Dblk, &h265blks);

        VencH265TimingS h265timings;
        memset(&h265timings, 0, sizeof(h265timings)); /* fill in real timing values before use */
        VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Timing, &h265timings);
        break;
    }
    

H265 stream header handling

/* Retrieve the H265 header (VPS/SPS/PPS) */
VencHeaderData sps_pps_data;
VideoEncGetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamH265Header, &sps_pps_data);

Note: for both H265 and H264, the stream header obtained this way must be written into the output together with the encoded data, otherwise other players cannot recognize the file information and play it.
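A minimal write-out sketch of that idea, assuming the GetOneBitstreamFrame/FreeOneBitStreamFrame calls and the pData0/nSize0/pData1/nSize1 members of VencOutputBuffer from vencoder.h (verify against your SDK), with out_fp again as a hypothetical output FILE*:

/* Sketch: write the header once at the start of the file, then append each
 * encoded frame as it comes out of the encoder. */
fwrite(sps_pps_data.pBuffer, 1, sps_pps_data.nLength, out_fp);

VencOutputBuffer out_buf;
memset(&out_buf, 0, sizeof(out_buf));
if (GetOneBitstreamFrame(pvideo_encoder->pvideoencoder, &out_buf) == 0) {
    fwrite(out_buf.pData0, 1, out_buf.nSize0, out_fp);
    if (out_buf.nSize1 > 0)                 /* the bitstream may wrap into a second chunk */
        fwrite(out_buf.pData1, 1, out_buf.nSize1, out_fp);
    FreeOneBitStreamFrame(pvideo_encoder->pvideoencoder, &out_buf);
}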

JPEG encoding parameter configuration

EXIF info (exifinfo) configuration:

exifinfo->ThumbWidth  = 720;
exifinfo->ThumbHeight = 576;
sprintf(exifinfo->CameraMake,  "TY-T7 make test");
sprintf(exifinfo->CameraModel, "TY-T7 mode test");
sprintf(exifinfo->DateTime, "%d:%d:%d %d:%d:%d",
        ptm->tm_year + 1900, ptm->tm_mon + 1, ptm->tm_mday,
        ptm->tm_hour, ptm->tm_min, ptm->tm_sec);
sprintf(exifinfo->gpsProcessingMethod, "TY-T7 GPS");

exifinfo->Orientation = 0;

exifinfo->ExposureTime.num = 2;        /* 2/1000 s */
exifinfo->ExposureTime.den = 1000;

exifinfo->FNumber.num = 20;            /* 20/10 = f/2.0 */
exifinfo->FNumber.den = 10;
exifinfo->ISOSpeed = 50;

exifinfo->ExposureBiasValue.num = -4;
exifinfo->ExposureBiasValue.den = 1;

exifinfo->MeteringMode = 1;
exifinfo->FlashUsed = 0;

exifinfo->FocalLength.num = 1400;      /* 1400/100 = 14 mm */
exifinfo->FocalLength.den = 100;

exifinfo->DigitalZoomRatio.num = 4;
exifinfo->DigitalZoomRatio.den = 1;

exifinfo->WhiteBalance = 1;
exifinfo->ExposureMode = 1;

exifinfo->enableGpsInfo = 1;
exifinfo->gps_latitude  = 23.2368;
exifinfo->gps_longitude = 24.3244;
exifinfo->gps_altitude  = 1234.5;
exifinfo->gps_timestamp = (long)time(NULL);

strcpy((char*)exifinfo->CameraSerialNum,  "123456789");
strcpy((char*)exifinfo->ImageName,        "exif-name-test");
strcpy((char*)exifinfo->ImageDescription, "exif-descriptor-test");
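After the structure is filled in it still has to be handed to the encoder before encoding. A minimal sketch, assuming the VENC_IndexParamJpegExifInfo and VENC_IndexParamJpegQuality indexes from vencoder.h (check your SDK header for the exact names):

/* Sketch: pass the EXIF data and a JPEG quality value to the encoder.
 * Index names assumed from vencoder.h; verify against your SDK. */
int quality = 90;   /* hypothetical quality value, range 0..100 */
VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamJpegExifInfo, exifinfo);
VideoEncSetParameter(pvideo_encoder->pvideoencoder, VENC_IndexParamJpegQuality, &quality);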

 

Allwinner T7 V4L2 CVBS video capture

Note: in Allwinner development, an "address" almost always means a physical address.

So the paramStruct structure described above is used here as well.

1. For the V4L2 capture program, look at the driver or at the demo in Allwinner's official SDK (V4l2CameraDevide.cpp / V4L2CameraDevice.h) to see how it is implemented. I used my own approach; the code follows.

First, open the corresponding device node: /dev/video4
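A minimal sketch of this step, requiring <fcntl.h> (O_NONBLOCK is my own choice; drop it for blocking I/O):

/* Open the CVBS capture node; on this board it is /dev/video4. */
pv4l2->fd = open("/dev/video4", O_RDWR | O_NONBLOCK, 0);
if (pv4l2->fd < 0) {
    DBG(DBG_ERR, "open /dev/video4 failed: %s\n", strerror(errno));
    return -1;
}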

Then query v4l2_capability; if the device qualifies, start configuring parameters:

struct v4l2_capability capability;
ret = ioctl(pv4l2->fd, VIDIOC_QUERYCAP, &capability);
if (ret != 0) {
    DBG(DBG_ERR, "get capability error\n");
    return -EINVAL;
}
if (capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) {  /* was V4L2_BUF_TYPE_VIDEO_CAPTURE: wrong flag set */
    /* configure the V4L2 parameters */
}

The width, height and related information need to be configured.

Set the V4L2 input (sketched below).
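A minimal sketch of the input selection, assuming the CVBS signal sits on input index 0 (adjust for your board):

/* Select the capture input; index 0 is an assumption for the CVBS port. */
int input_index = 0;
ret = ioctl(pv4l2->fd, VIDIOC_S_INPUT, &input_index);
if (ret < 0) {
    DBG(DBG_ERR, "VIDIOC_S_INPUT failed: %s\n", strerror(errno));
    return -1;
}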

Configure the v4l2_format information:

struct v4l2_format video_fmt = { 0 };
video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_fmt.fmt.pix.width = pv4l2->width;
video_fmt.fmt.pix.height = pv4l2->height;
video_fmt.fmt.pix.field = V4L2_FIELD_NONE;
video_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV21;
video_fmt.fmt.pix.bytesperline = pv4l2->width;
video_fmt.fmt.pix.sizeimage = pv4l2->width * pv4l2->height * 3 / 2;  /* NV21: 12 bits per pixel */
//video_fmt.fmt.pix.colorspace = V4L2_COLORSPACE_SMPTE170M;
ret = ioctl(pv4l2->fd, VIDIOC_S_FMT, &video_fmt);
if (ret < 0) {
    DBG(DBG_ERR, "set tvin image format failed: %s  ret = %d\n", strerror(errno), ret);
    return -1;
}

Set the v4l2_streamparm (sketched below).
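A minimal sketch, assuming a 25 fps source (PAL CVBS); adjust timeperframe to your signal:

/* Request a nominal capture frame rate; 1/25 s per frame is an assumption for PAL. */
struct v4l2_streamparm stream_parm = { 0 };
stream_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
stream_parm.parm.capture.timeperframe.numerator = 1;
stream_parm.parm.capture.timeperframe.denominator = 25;
ret = ioctl(pv4l2->fd, VIDIOC_S_PARM, &stream_parm);
if (ret < 0) {
    DBG(DBG_ERR, "VIDIOC_S_PARM failed: %s\n", strerror(errno));
    /* not fatal on every driver; continue with the driver defaults */
}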

Set the v4l2_requestbuffers:

struct v4l2_requestbuffers req_buffer = { 0 };
/* request capture buffers from the driver */
req_buffer.count = 8;
req_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
#if CAPTURE_USE_USERPTR
req_buffer.memory = V4L2_MEMORY_USERPTR;
#else
req_buffer.memory = V4L2_MEMORY_MMAP;
#endif

ret = ioctl(pv4l2->fd, VIDIOC_REQBUFS, &req_buffer);
if (ret < 0) {
    DBG(DBG_ERR, "request capture buffer from driver %s failed %d\n", pv4l2->dev, ret);
    return -ENOMEM;
}

Because the data the Allwinner hardware produces is addressed physically, the CAPTURE_USE_USERPTR path has to be used.

Once that is configured, set up the capture buffers:

DBG(DBG_INFO, "request buffer count = %d\n", req_buffer.count);
for (i = 0; i < req_buffer.count; i++) {
    struct video_frame* frame;
    frame = (struct video_frame*)malloc(sizeof(struct video_frame));
    if (!frame) {                        /* check before using the pointer */
        DBG(DBG_ERR, "malloc frame with error\n");
        continue;
    }
    frame->width = pv4l2->width;
    frame->height = pv4l2->height;
    frame->stride = pv4l2->width;
    frame->index = i;

    memset(&frame->v4l2_buf, 0, sizeof(struct v4l2_buffer));
    frame->v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
#if CAPTURE_USE_USERPTR
    frame->v4l2_buf.memory = V4L2_MEMORY_USERPTR;
#else
    frame->v4l2_buf.memory = V4L2_MEMORY_MMAP;
#endif
    frame->v4l2_buf.index = i;

    ret = ioctl(pv4l2->fd, VIDIOC_QUERYBUF, &frame->v4l2_buf);
    if (ret < 0) {
        DBG(DBG_ERR, "query driver buffer %d failed %d\n", i, ret);
        continue;
    }
#if CAPTURE_USE_USERPTR
    /* hand the driver the virtual address mapped from the physical buffer
       allocated through sunxiMemInterface (paramstruct.vir / .phy) */
    frame->virt_addr[0] = pv4l2->paramstruct.vir;
    frame->v4l2_buf.m.userptr = (unsigned long)frame->virt_addr[0];
#else
    frame->index = frame->v4l2_buf.index;
    frame->length = frame->v4l2_buf.length;
    frame->time = frame->v4l2_buf.timestamp;
    frame->virt_addr[0] = mmap(NULL, frame->v4l2_buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, pv4l2->fd, frame->v4l2_buf.m.offset);
    if (frame->virt_addr[0] == MAP_FAILED) {
        DBG(DBG_ERR, "mmap %s capture plane buffer %d failed\n", pv4l2->dev, i);
        perror("mmap");
    }
#endif
    frame->owner = pv4l2;
    pv4l2->op->put_sync_frame(pv4l2, frame);
}
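The loop above hands each prepared frame to an internal queue through put_sync_frame; before capture actually starts, the buffers still have to be queued to the driver and streaming switched on. A minimal sketch of those standard V4L2 steps (where exactly the real module does this may differ, and frame_at(i) is a hypothetical accessor for the frames prepared above):

/* Sketch: queue every prepared buffer and start streaming (standard V4L2 flow). */
for (i = 0; i < req_buffer.count; i++) {
    ret = ioctl(pv4l2->fd, VIDIOC_QBUF, &frame_at(i)->v4l2_buf);  /* frame_at(): hypothetical accessor */
    if (ret < 0)
        DBG(DBG_ERR, "VIDIOC_QBUF %d failed: %s\n", i, strerror(errno));
}

enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(pv4l2->fd, VIDIOC_STREAMON, &type);
if (ret < 0)
    DBG(DBG_ERR, "VIDIOC_STREAMON failed: %s\n", strerror(errno));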
    

Full module code: https://download.csdn.net/download/qq_38542509/16875068

