海思接收bt1120外同步视频流

15 篇文章 165 订阅 ¥59.90 ¥99.00
本文介绍了BT1120协议的内外同步机制,详细阐述了如何配置海思芯片进行BT1120外同步视频流的接收,并提供了代码示例,包括VI模式配置、外同步设置以及pipe和通道配置,帮助理解出图后的proc信息。
摘要由CSDN通过智能技术生成

一. 什么叫bt1120外同步

1. bt1120协议

在这里插入图片描述

  • BT1120协议是通过16位数据传输的,其中8位用来传输亮度(即Y值),另外8位用来传输色度(即UV值)。
  • 所以如果接收到的图像轮廓不完整,一般是Y值出了问题;如果颜色不对,则是UV值不正确。
    从上图中我们可以看到,数据中前面有4字节内容,为 FF 00 00 XY。这4个字节我们把它叫做定时基准码(即SAV/EAV同步码),其中 FF 00 00 是固定前缀,XY 是变化的,用来表示当前数据类型——是有效图像数据还是消隐区数据:
    在这里插入图片描述
    关于有效和消隐,看下图:
  • 5
    点赞
  • 16
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 4
    评论
以下是一个基本的海思输出BT656视频流的代码: ```c #include <stdio.h> #include <stdlib.h> #include <stdint.h> #include <fcntl.h> #include <sys/mman.h> #include <unistd.h> #include <asm/ioctl.h> #include <linux/fb.h> #include <linux/videodev2.h> #define VIDEO_DEVICE "/dev/video0" #define VIDEO_WIDTH 640 #define VIDEO_HEIGHT 480 #define VIDEO_FORMAT V4L2_PIX_FMT_YUYV #define FB_DEVICE "/dev/fb0" int main(void) { int fd_fb = open(FB_DEVICE, O_RDWR); if(fd_fb == -1) { printf("Error: could not open framebuffer device.\n"); return -1; } struct fb_var_screeninfo var_info; if(ioctl(fd_fb, FBIOGET_VSCREENINFO, &var_info) == -1) { printf("Error: could not get variable screen info.\n"); close(fd_fb); return -1; } uint32_t width = var_info.xres; uint32_t height = var_info.yres; uint32_t bytes_per_pixel = var_info.bits_per_pixel / 8; uint32_t fb_size = width * height * bytes_per_pixel; uint8_t *fbp = (uint8_t *)mmap(0, fb_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd_fb, 0); if(fbp == MAP_FAILED) { printf("Error: could not mmap framebuffer device.\n"); close(fd_fb); return -1; } int fd_video = open(VIDEO_DEVICE, O_RDWR); if(fd_video == -1) { printf("Error: could not open video device.\n"); munmap(fbp, fb_size); close(fd_fb); return -1; } struct v4l2_capability cap; if(ioctl(fd_video, VIDIOC_QUERYCAP, &cap) == -1) { printf("Error: could not query video device capabilities.\n"); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } struct v4l2_format fmt; fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = VIDEO_WIDTH; fmt.fmt.pix.height = VIDEO_HEIGHT; fmt.fmt.pix.pixelformat = VIDEO_FORMAT; fmt.fmt.pix.field = V4L2_FIELD_ANY; if(ioctl(fd_video, VIDIOC_S_FMT, &fmt) == -1) { printf("Error: could not set video format.\n"); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } struct v4l2_requestbuffers req; req.count = 1; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if(ioctl(fd_video, VIDIOC_REQBUFS, &req) == -1) { printf("Error: could not 
request video buffers.\n"); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } struct v4l2_buffer buf; buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = 0; if(ioctl(fd_video, VIDIOC_QUERYBUF, &buf) == -1) { printf("Error: could not query video buffer.\n"); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } void *video_buffer = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_video, buf.m.offset); if(video_buffer == MAP_FAILED) { printf("Error: could not mmap video buffer.\n"); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } if(ioctl(fd_video, VIDIOC_STREAMON, &buf.type) == -1) { printf("Error: could not start video streaming.\n"); munmap(video_buffer, buf.length); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return -1; } // Write BT656 video data to framebuffer uint8_t *video_data = (uint8_t *)video_buffer; for(uint32_t y = 0; y < height; y++) { for(uint32_t x = 0; x < width; x++) { uint32_t fb_offset = (y * width + x) * bytes_per_pixel; uint32_t video_offset = (y * width + x) * 2; // Convert YCbCr to RGB uint8_t y_data = *(video_data + video_offset); uint8_t cb_data = *(video_data + video_offset + 1); uint8_t cr_data = *(video_data + video_offset + 3); uint8_t r = y_data + 1.402 * (cr_data - 128); uint8_t g = y_data - 0.34414 * (cb_data - 128) - 0.71414 * (cr_data - 128); uint8_t b = y_data + 1.772 * (cb_data - 128); *(fbp + fb_offset) = r; *(fbp + fb_offset + 1) = g; *(fbp + fb_offset + 2) = b; } } if(ioctl(fd_video, VIDIOC_STREAMOFF, &buf.type) == -1) { printf("Error: could not stop video streaming.\n"); } munmap(video_buffer, buf.length); munmap(fbp, fb_size); close(fd_fb); close(fd_video); return 0; } ``` 这个代码假设你的海思芯片已经捕获了BT656格式的视频流,并将其输出到/dev/video0设备上。它将使用 mmap() 函数来映射视频和帧缓冲设备,并将YCbCr格式的视频转换为RGB格式并写入帧缓冲设备。
评论 4
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

lzg2021

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值