//这个是友善之臂专题8的源代码,不过没有注释,现在加上了注释,提供下载链接,也可以不下载,对照着这里看。因为这个过程包括采集、硬编码、软解码等过程,
//调用的函数是规范的,所以不是用tiny6410的也可以看看,不是用ARM的也可以看看解码部分!
//我只在tiny6410上尝试,但是因为这里涉及的都是lcd设备节点和摄像头设备节点,基于S3C6410的MFC,所以其他6410应该可以通用
//这些注释有一些是网上复制的【得罪之处请见谅】,但一一对应在源代码上,有一些是自己写上去的。
// 注释main.cpp 这是注释版,虽然核对了两次,但还是可能出错哦,下载链接在我的资源里【1分,包含整个源代码文件夹,也包含注释】
#include <iostream>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <linux/types.h>
#include <linux/fb.h>
#include <linux/videodev2.h>
#include <signal.h>
#include <semaphore.h>
#include "SsbSipH264Encode.h"
#include "SsbSipH264Decode.h"
#include "SsbSipMpeg4Decode.h"
#include "SsbSipVC1Decode.h"
#include "FrameExtractor.h"
#include "MPEG4Frames.h"
#include "H263Frames.h"
#include "H264Frames.h"
#include "LogMsg.h"
#include "performance.h"
#include "lcd.h"
#include "MfcDriver.h"
#include "FileRead.h"
#include "s3c_pp.h"
extern void convert_rgb16_to_yuv420(unsigned char *rgb, unsigned char *yuv, int width, int height);
#define MEDIA_FILE_NAME "/tmp/cam_encoding.h264" //yjx changed!!
#define LCD_BPP_V4L2 V4L2_PIX_FMT_RGB565
#define VIDEO_WIDTH 320
#define VIDEO_HEIGHT 240
#define YUV_FRAME_BUFFER_SIZE VIDEO_WIDTH*VIDEO_HEIGHT*2
#define PP_DEV_NAME "/dev/s3c-pp" // 输出的设备在/dev/s3c-pp
#define INPUT_BUFFER_SIZE (204800)
extern int FriendlyARMWidth, FriendlyARMHeight; //在末尾定义,其实是从fb0节点来的
#define FB0_WIDTH FriendlyARMWidth//Friendly的参数将转化为FB0的参数
#define FB0_HEIGHT FriendlyARMHeight//Friendly的参数将转化为FB0的参数
#define FB0_BPP 16 //位深度
#define FB0_COLOR_SPACE RGB16
static void sig_del_h264(int signo);
static unsigned char delimiter_h264[4] = {0x00, 0x00, 0x00, 0x01};
#define INPUT_BUFFER_SIZE (204800)
//这里是标签a
static void *handle; ///MFC的一个关键句柄
static int in_fd; //对应/tmp目录下的.264h文件
static int file_size; //对应.h264文件的大小
static char *in_addr; //.h264文件映射到的内存的起始地址
static int fb_size; //为lcd设备缓冲区的大小
static int pp_fd, fb_fd;//分别对应pp的节点文件,是lcd缓冲区的节点文件
static char *fb_addr; //
// SIGINT handler installed by playback(): stops the on-screen display,
// releases the MFC decoder instance, unmaps the input file and the LCD
// frame buffer, closes all descriptors and terminates the process.
static void sig_del_h264(int signo)// called from playback(); cleanup path for the decode side
{
printf("[H.264 display] signal handling\n");
ioctl(fb_fd, SET_OSD_STOP);        // stop the OSD window on the LCD
SsbSipH264DecodeDeInit(handle);    // release the MFC decoder instance
munmap(in_addr, file_size);        // unmap the recorded .h264 file
munmap(fb_addr, fb_size);          // unmap the LCD frame buffer
close(pp_fd);
close(fb_fd);
close(in_fd);
exit(1);
}
/******************* CAMERA ********************/
// Lightweight exception object carrying a pointer to a constant message
// string. The string is never copied or freed, so callers must pass a
// string with static lifetime (string literals in this file).
class TError
{
public:
    TError(const char *msg): msg(msg) {}
    TError(const TError &other): msg(other.msg) {}
    // Print the message to the standard error stream.
    void Output() {
        std::cerr << msg << std::endl;
    }
    virtual ~TError() {}
protected:
    TError &operator=(const TError&); // assignment intentionally unavailable
private:
    const char *msg;
};
// Linear memory based image
// A rectangular pixel buffer in linear memory (16 bpp only).
// Base class for the LCD frame buffer and the camera capture buffer;
// derived classes are responsible for filling in Addr, Size, Width,
// Height, LineLen and BPP.
class TRect
{
public:
    TRect(): Addr(0), Size(0), Width(0), Height(0), LineLen(0), BPP(16) {
    }
    virtual ~TRect() {
    }
    // Blit SrcRect into this rectangle with its top-left corner at (x, y),
    // clipping against this rectangle's bounds. Throws TError unless both
    // buffers are 16 bpp. Returns true (a fully clipped-away source is not
    // treated as an error).
    bool DrawRect(const TRect &SrcRect, int x, int y) const
    {
        if (BPP != 16 || SrcRect.BPP != 16) {
            throw TError("does not support other than 16 BPP yet");
        }
        // Destination-space corners of the source rectangle, pre-clip.
        int left   = x;
        int top    = y;
        int right  = left + SrcRect.Width - 1;
        int bottom = top + SrcRect.Height - 1;
        // Horizontal clip; bail out when nothing remains visible.
        if (left < 0)
            left = 0;
        if (left > Width - 1 || right < 0)
            return true;
        if (right > Width - 1)
            right = Width - 1;
        // Vertical clip.
        if (top < 0)
            top = 0;
        if (top > Height - 1 || bottom < 0)
            return true;
        if (bottom > Height - 1)
            bottom = Height - 1;
        // Copy the visible span of each scanline.
        const int bytesPerRow = (right + 1 - left) * BPP / 8;
        unsigned char *dst = Addr + LineLen * top + left * BPP / 8;
        const unsigned char *src =
            SrcRect.Addr + SrcRect.LineLen * (top - y) + (left - x) * SrcRect.BPP / 8;
        for (int row = top; row <= bottom; row++) {
            memcpy(dst, src, bytesPerRow);
            dst += LineLen;
            src += SrcRect.LineLen;
        }
        return true;
    }
    // Blit rect centered inside this rectangle.
    bool DrawRect(const TRect &rect) const {
        return DrawRect(rect, (Width - rect.Width) / 2, (Height - rect.Height) / 2);
    }
    // Zero every visible scanline (black for RGB565).
    bool Clear() const {
        unsigned char *row = Addr;
        for (int i = 0; i < Height; i++, row += LineLen) {
            memset(row, 0, Width * BPP / 8);
        }
        return true;
    }
    unsigned char *getAddr()
    {
        return Addr;
    }
protected:
    TRect(const TRect&);                // non-copyable
    TRect &operator=(const TRect&);
protected:
    unsigned char *Addr;                // start of the pixel data
    int Size;                           // buffer size in bytes
    int Width, Height, LineLen;         // pixel dims; LineLen = bytes per scanline
    unsigned BPP;                       // bits per pixel (only 16 supported)
};
// Binds the LCD frame buffer device (default "/dev/fb0") to a TRect:
// queries the geometry via FBIOGET_FSCREENINFO/FBIOGET_VSCREENINFO, mmaps
// the pixel memory into Addr and then closes the descriptor (the mapping
// stays valid after close()). Only 16 bpp panels are supported; any setup
// failure throws TError.
class TFrameBuffer: public TRect
{
public:
    TFrameBuffer(const char *DeviceName = "/dev/fb0"): TRect(), fd(-1) {
        // Minor device numbers: 0 = /dev/fb0 (first frame buffer),
        // 1 = /dev/fb1 (second frame buffer).
        Addr = (unsigned char *)MAP_FAILED;
        fd = open(DeviceName, O_RDWR);
        if (fd < 0) {
            throw TError("cannot open frame buffer");
        }
        struct fb_fix_screeninfo Fix;
        struct fb_var_screeninfo Var;
        if (ioctl(fd, FBIOGET_FSCREENINFO, &Fix) < 0 || ioctl(fd, FBIOGET_VSCREENINFO, &Var) < 0) {
            ::close(fd);   // BUG FIX: fd was leaked when the ctor threw
            fd = -1;
            throw TError("cannot get frame buffer information");
        }
        BPP = Var.bits_per_pixel;
        if (BPP != 16) {
            ::close(fd);   // BUG FIX: fd was leaked when the ctor threw
            fd = -1;
            throw TError("support 16BPP frame buffer only");
        }
        Width = Var.xres;
        Height = Var.yres;
        LineLen = Fix.line_length;
        // Round the mapping length up to a whole number of pages.
        Size = LineLen * Height;
        int PageSize = getpagesize();
        Size = (Size + PageSize - 1) / PageSize * PageSize;
        Addr = (unsigned char *)mmap(NULL, Size, PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0);
        if (Addr == (unsigned char *)MAP_FAILED) {
            ::close(fd);   // BUG FIX: fd was leaked (and a dead `return` followed the throw)
            fd = -1;
            throw TError("map frame buffer failed");
        }
        // The mapping survives closing the descriptor.
        ::close(fd);
        fd = -1;
        Clear();
    }
    virtual ~TFrameBuffer()
    {
        if (Addr != (unsigned char *)MAP_FAILED) {
            ::munmap(Addr, Size);
            Addr = (unsigned char *)MAP_FAILED;
        }
        if (fd >= 0) {
            ::close(fd);
            fd = -1;
        }
    }
protected:
    TFrameBuffer(const TFrameBuffer&);
    TFrameBuffer &operator=(const TFrameBuffer&);
private:
    int fd;
};
// Wraps the camera device (default "/dev/camera") as a TRect. Frames are
// read() into a heap staging buffer (Addr) rather than mmapped. If the
// default node cannot be opened, TryAnotherCamera() falls back to a V4L2
// overlay device (and throws TError if none works).
class TVideo : public TRect
{
public:
    TVideo(const char *DeviceName = "/dev/camera"): TRect(), fd(-1) {
        Width = VIDEO_WIDTH;
        Height = VIDEO_HEIGHT;
        BPP = 16;                        // camera delivers RGB565
        LineLen = Width * BPP / 8;
        Size = LineLen * Height;
        Addr = 0;
        fd = ::open(DeviceName, O_RDONLY);
        if (fd < 0) {
            TryAnotherCamera();          // throws if no usable device found
        }
        Addr = new unsigned char[Size];  // staging buffer for one frame
        // BUG FIX: Size is int, so "%ld" was undefined behavior; %p needs void*.
        printf("Addr = %p, Size=%d\n", (void *)Addr, Size);
        Clear();
    }
    // Read exactly one frame into Addr; throws TError on a short read.
    bool FetchPicture() const {
        ssize_t count = ::read(fd, Addr, Size);
        if (count != (ssize_t)Size) {
            throw TError("error in fetching picture from video");
        }
        return true;
    }
    virtual ~TVideo() {
        ::close(fd);
        fd = -1;
        delete[] Addr;
        Addr = 0;
    }
protected:
    TVideo(const TVideo&);
    TVideo &operator=(const TVideo&);
private:
    int fd;
    void TryAnotherCamera();             // defined near the end of this file
};
/* MFC 硬件编码!!!!!!!!!!详细见http://wenku.baidu.com/view/bf7032d33186bceb19e8bbbe.html
Multi Format Codec的缩写,是ARM微处理器内部一种支持多种硬件编码方式的硬件电路,能够编码/解码 MPEG-4/H.263/H.264(30fps)等多种格式的多媒体影像。
TOP6410开发板使用的是ARM11的核,我们现在要利用这个ARM内部的硬件编解码电路来直接对摄像头采集到的图像进行基于硬件的编解码。首 先在项目开始前需要对TOP6410的性能做大体的测试,现在我们使用的是三星提供的测试程序,通过对这个测试程序的分析可以很好地让我们了解基于系统级 的图像编解码函数的调用机制,有利于我们顺利的提取图像并且进行进一步的处理。 要使用MFC,首先要了解如何使用这种机制,在我们的代码中首先要定义一个MFC的句柄(handle),所有的MFC操作都是需要通过传递这个handle作为参数来执行的,它的重要性就跟main 函数差不多,是整个编解码过程的掌舵者。在源码中是这样定义的: static void *handle; */
/*MFC functions encode是编码的意思 分别包含初始化和编码流程和释放
这些函数在这个文件的最后有定义!!!!!!!!!!!!*/
static void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num);
static void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size);
static void mfc_encoder_free(void *handle);
// Hardware H.264 encoder wrapper. The constructor initializes the MFC
// encoder (VIDEO_WIDTH x VIDEO_HEIGHT, 15 fps, 1000 kbps, GOP 15) and
// opens MEDIA_FILE_NAME for the output stream; Encode() converts one
// RGB565 frame to YUV420 and appends the encoded bitstream to the file.
class TH264Encoder
{
public:
    TH264Encoder() {
        frame_count = 0;
        handle = mfc_encoder_init(VIDEO_WIDTH, VIDEO_HEIGHT, 15, 1000, 15);
        if (handle == 0) {
            throw TError("cannot init mfc encoder");
        }
        encoded_fp = fopen(MEDIA_FILE_NAME, "wb+");
        if (encoded_fp == 0) {
            mfc_encoder_free(handle); // BUG FIX: the MFC handle was leaked on this path
            throw TError("cannot open /tmp/cam_encoding.h264");
        }
    }
    virtual ~TH264Encoder() {
        mfc_encoder_free(handle);
        fclose(encoded_fp);
    }
    // Encode one captured frame: RGB565 -> YUV420 -> MFC hardware encoder,
    // then append the encoded data to the output file. The first frame is
    // flagged so the encoder emits the stream header as well.
    void Encode(TRect &rect)
    {
        frame_count++;
        unsigned char *pRgbData = rect.getAddr();
        convert_rgb16_to_yuv420(pRgbData, g_yuv, VIDEO_WIDTH, VIDEO_HEIGHT);
        encoded_buf = (unsigned char *)mfc_encoder_exe(handle, g_yuv, YUV_FRAME_BUFFER_SIZE,
                                                       frame_count == 1 ? 1 : 0, &encoded_size);
        // Robustness: mfc_encoder_exe's output comes from the MFC library,
        // which can report failure -- skip the write instead of faulting.
        if (encoded_buf != 0 && encoded_size > 0) {
            fwrite(encoded_buf, 1, encoded_size, encoded_fp);
        }
    }
protected:
    TH264Encoder(const TH264Encoder&);
    TH264Encoder &operator=(const TH264Encoder&);
private:
    int frame_count;                             // frames encoded so far
    void *handle;                                // MFC encoder instance
    FILE *encoded_fp;                            // output .h264 file
    unsigned char g_yuv[YUV_FRAME_BUFFER_SIZE];  // YUV420 staging buffer
    unsigned char *encoded_buf;                  // last encoded chunk (MFC-owned)
    long encoded_size;                           // size of encoded_buf in bytes
};
//这里有很多内容是关于播放的,但是由于暂时不相关,先不看,长度达到314行
int playback()
{
FILE* f = fopen(MEDIA_FILE_NAME,"r");
if (f == 0) {
printf("please record first!");
return -1;
}
fclose(f);
void *pStrmBuf;
int nFrameLeng = 0;
unsigned int pYUVBuf[2];
struct stat s;
FRAMEX_CTX *pFrameExCtx; // frame extractor context
FRAMEX_STRM_PTR file_strm;
SSBSIP_H264_STREAM_INFO stream_info;
s3c_pp_params_t pp_param;
s3c_win_info_t osd_info_to_driver;
struct fb_fix_screeninfo lcd_info;
#ifdef FPS
struct timeval start, stop;
unsigned int time = 0;
int frame_cnt = 0;
int mod_cnt = 0;
#endif
if(signal(SIGINT, sig_del_h264) == SIG_ERR) {
printf("Sinal Error\n");
}
// in file open
in_fd = open(MEDIA_FILE_NAME, O_RDONLY); //就是H264文件
if(in_fd < 0) {
printf("Input file open failed\n");
return -1;
}
// get input file size
fstat(in_fd, &s); //将文件相关内容刚在s结构体中
file_size = s.st_size;
// mapping input file to memory
in_addr = (char *)mmap(0, file_size, PROT_READ, MAP_SHARED, in_fd, 0);//文件映射到内存中,函数mmap()返回首地址,in_addr
if(in_addr == NULL) {
printf("input file memory mapping failed\n");
return -1;
}
// Post processor open
pp_fd = open(PP_DEV_NAME, O_RDWR);//S3C6410显示控制器包含这样的逻辑电路:从后处理器(POST Processor)或系统内存视频缓冲数据的本地总线传递数据到外部LCD驱动接口电路的逻辑电路。
if(pp_fd < 0)
{
printf("Post processor open error\n");
return -1;
}
// LCD frame buffer open
fb_fd = open("/dev/fb1", O_RDWR|O_NDELAY); //打开frame buffer,返回文件描述符fb_fd;
if(fb_fd < 0)
{
printf("LCD frame buffer open error\n");
return -1;
}
///
// FrameExtractor Initialization //帧解压初始化//
///
pFrameExCtx = FrameExtractorInit(FRAMEX_IN_TYPE_MEM, delimiter_h264, sizeof(delimiter_h264), 1); /*
函数FrameExtractorInit,初始化帧提取器
FrameExtractorInit(FRAMEX_IN_TYPE_MEM, delimiter_h264, sizeof(delimiter_h264), 1);
参数:
FRAMEX_IN_TYPE_MEM:类型是枚举类型FRAMEX_IN_TYPE,指明输入文件的类型,在此说明是从内存读取文件,还可以是FRAMEX_IN_TYPE_FILE
delimiter_h264:类型是字符数组,有四个成员分别是0x00,0x00,0x00,0x01,与h264编码方式有关;
函数功能:
构建类型FRAMEX_CTX,并利用参数填充,最后返i回其指针pFrameExCtx;*/
file_strm.p_start = file_strm.p_cur = (unsigned char *)in_addr;
//将文件在内存的起始地址,当前地址以及结束地址保存到类型为FRAMEX_STRM_PTR的量file_strm中;
file_strm.p_end = (unsigned char *)(in_addr + file_size);
FrameExtractorFirst(pFrameExCtx, &file_strm); //流文件缓冲区起始及结束
/*
函数FrameExtractorFirst,提取第一个帧?
FrameExtractorFirst(pFrameExCtx, &file_strm)
参数:
pFrameExCtx:初始化函数返回的指针
file_strm:包含文件地址信息
函数功能:
根据pFrameExCtx中保存的数据输入类型,选择进入函数:next_delimiter_mem(pCTX, (FRAMEX_STRM_PTR *) in, NULL, 0, NULL);
#函数next_delimiter_mem
#(FRAMEX_CTX *pCTX, FRAMEX_STRM_PTR *strm_ptr, unsigned char *outbuf, const int outbuf_size, int *n_fill)
#参数:
#pCTX:就是在帧提取器初始化函数返回的量:保存了文件输入类型以及一个4个字符的字符数组
#strm_ptr:保存了文件地址的指针
#函数功能:
#初始化一个字符数组queue[12],容量为12,依次取出源数据的前4个字符与pCTX中的比较,相等的话就将相应字符填充到queue[12] 中,也就是说本次函数执行实现的就是将源数据的前四个个字符内容复制到queue[12]中,同时存储文件地址的量中的当前指针递增。 (strm_ptr->p_cur++)
*/
//
/// 1. Create new instance ///
/// (SsbSipH264DecodeInit) ///
//
handle = SsbSipH264DecodeInit(); /*
函数SsbSipH264DecodeInit()创建一个实例
返回值:
_MFCLIB_H264_DEC类型的指针
函数功能:
打开mfc设备,并将其映射到内存中,构建类型_MFCLIB_H264_DEC,并执行初始化填充,保存设备打开的描述符,内存地址,还有一个_MFCLIB_H264_DEC_MAGIC_NUMBER=0x92241002,
返回指向_MFCLIB_H264_DEC的指针handle*/
if (handle == NULL) {
printf("H264_Dec_Init Failed.\n");
return -1;
}
/
/// 2. Obtaining the Input Buffer ///获取输入buffer
/// (SsbSipH264DecodeGetInBuf) ///
/
pStrmBuf = SsbSipH264DecodeGetInBuf(handle, nFrameLeng);/*
函数SsbSipH264DecodeGetInBuf,获取输入buffer
SsbSipH264DecodeGetInBuf(handle, nFrameLeng)
参数:
handle,创建实例是返回的类型_MFCLIB_H264_DEC,保存mfc设备的内存映射地址
nFrameLeng,初值为0;
函数功能:
构建类型统一体MFC_ARGS,应该是要传递给mfc的一些参数配置,并将mfc内存的首地址保存到此类型中,调用函数ioctl
ioctl(pCTX->hOpen, IOCTL_MFC_GET_LINE_BUF_ADDR, &mfc_args)
类型MFC_ARGS中的结构体MFC_GET_BUF_ADDR_ARG中会保存返回的数据:输出buffer的地址,输出buffer的大小,
返回输出buffer的地址*/
if (pStrmBuf == NULL) {
printf("SsbSipH264DecodeGetInBuf Failed.\n");
SsbSipH264DecodeDeInit(handle);
return -1;
}
// H264 CONFIG stream extraction //
nFrameLeng = ExtractConfigStreamH264(pFrameExCtx, &file_strm, (unsigned char*)pStrmBuf, INPUT_BUFFER_SIZE, NULL);/*
函数ExtractConfigStreamH264//H264 CONFIG stream extraction
ExtractConfigStreamH264(pFrameExCtx, &file_strm, pStrmBuf, INPUT_BUFFER_SIZE, NULL);
参数:
pFrameExCtx:初始化函数FrameExtractorInit返回的指向结构体FRAMEX_CTX的指针
file_strm:类型FRAMEX_STRM_PTR,保存文件映射到内存的首地址和结束地址
pStrmBuf:函数SsbSipH264DecodeGetInBuf返回的输出buffer的地址
INPUT_BUFFER_SIZE:宏定义:204800
结构体:H264_CONFIG_DATA,成员是长和宽,在此为NULL
函数功能://分析源文件获取总共的帧数,并返回nFrameLeng
(1)for循环(100)
(2)调用函数FrameExtractorPeek(pFrameExCtx, fp, frame_type, sizeof(frame_type), (int *)&nFrameSize);
frame_type:容量为10的字符数组,作为输出参数,函数实现就是读取源文件的前十个字符到frame_type中,并相应移动当前指针
(3)取返回数组中的元素frame_type[4]的弟八位,如果不是6,7或8退出函数
(4)函数FrameExtractorNext
#函数next_delimiter_mem*/
/// 3. Configuring the instance with the config stream ///
/// (SsbSipH264DecodeExe) ///
if (SsbSipH264DecodeExe(handle, nFrameLeng) != SSBSIP_H264_DEC_RET_OK) {
printf("H.264 Decoder Configuration Failed.\n");
return -1;
}/*
函数SsbSipH264DecodeExe(handle, nFrameLeng)
参数:
handle:decode实例化函数SsbSipH264DecodeInit()返回的指向类型_MFCLIB_H264_DEC的指针
根据参数对mfc初始化
调用函数ioctl(),会在mfc_args中返回信息,将这些信息传递给handle*/
/
/// 4. Get stream information ///
/
SsbSipH264DecodeGetConfig(handle, H264_DEC_GETCONF_STREAMINFO, &stream_info);
//printf("\t<STREAMINFO> width=%d height=%d.\n", stream_info.width, stream_info.height);//yjx delete 注释
/*
函数SsbSipH264DecodeGetConfig(handle, H264_DEC_GETCONF_STREAMINFO, &stream_info);
获取stream的配置信息
参数:
H264_DEC_GETCONF_STREAMINFO:cmd类型
stream_info:类型是SSBSIP_H264_STREAM_INFO,将handle在函数SsbSipH264DecodeExe中获取的信息传递给此量和一个全局量g_stream_info
*/
// set post processor configuration
pp_param.src_full_width = stream_info.buf_width;
pp_param.src_full_height = stream_info.buf_height;
pp_param.src_start_x = 0;
pp_param.src_start_y = 0;
pp_param.src_width = pp_param.src_full_width;
pp_param.src_height = pp_param.src_full_height;
pp_param.src_color_space = YC420; //MFC decode数据为YUV420格式
pp_param.dst_start_x = 0;
pp_param.dst_start_y = 0;
pp_param.dst_full_width = FB0_WIDTH; // destination width
pp_param.dst_full_height = FB0_HEIGHT; // destination height
pp_param.dst_width = pp_param.dst_full_width;
pp_param.dst_height = pp_param.dst_full_height;
pp_param.dst_color_space = FB0_COLOR_SPACE;
pp_param.out_path = DMA_ONESHOT;/*
设置pp_param,pp是和s3c6410现实相关的
pp_param的类型是结构体s3c_pp_params_t,是对pp的配置
调用ioctl将参数传递给pp*/
ioctl(pp_fd, S3C_PP_SET_PARAMS, &pp_param);
// get LCD frame buffer address
fb_size = pp_param.dst_full_width * pp_param.dst_full_height * 2; // RGB565
fb_addr = (char *)mmap(0, fb_size, PROT_READ | PROT_WRITE, MAP_SHARED, fb_fd, 0);
if (fb_addr == NULL) {
printf("LCD frame buffer mmap failed\n");
return -1;
}/*
计算framebuffer的大小
fb_size = pp_param.dst_full_width * pp_param.dst_full_height * 4; // RGB888
并将fb映射到内存*/
osd_info_to_driver.Bpp = FB0_BPP; // RGB16
osd_info_to_driver.LeftTop_x = 0;
osd_info_to_driver.LeftTop_y = 0;
osd_info_to_driver.Width = FB0_WIDTH; // display width
osd_info_to_driver.Height = FB0_HEIGHT; // display height
// set OSD's information
if(ioctl(fb_fd, SET_OSD_INFO, &osd_info_to_driver)) {
printf("Some problem with the ioctl SET_OSD_INFO\n");
return -1;
}
ioctl(fb_fd, SET_OSD_START);//设置OSD相关信息,此时在屏幕上就会画出一块巨型,准备显示了
while(1)
{/*
完成上面的以后就会进入解码环节:
#进入while(1),每次一帧!*/
#ifdef FPS
gettimeofday(&start, NULL);
#endif
//
/// 5. DECODE ///
/// (SsbSipH264DecodeExe) ///
//
if (SsbSipH264DecodeExe(handle, nFrameLeng) != SSBSIP_H264_DEC_RET_OK)//调用函数SsbSipH264DecodeExe,通知mfc开始解码
break;
//
/// 6. Obtaining the Output Buffer ///
/// (SsbSipH264DecodeGetOutBuf) ///
//
SsbSipH264DecodeGetConfig(handle, H264_DEC_GETCONF_PHYADDR_FRAM_BUF, pYUVBuf);//函数SsbSipH264DecodeGetConfig(handle, H264_DEC_GETCONF_PHYADDR_FRAM_BUF, pYUVBuf),获取解码后的数据,输出buffer;与上面调用的区别是cmdType不同,调用ioctl,会在mfc_args中返回输出buffer的地址和输出buffer的大小
/
// Next H.264 VIDEO stream //获取下一帧//
/
nFrameLeng = NextFrameH264(pFrameExCtx, &file_strm, (unsigned char*)pStrmBuf, INPUT_BUFFER_SIZE, NULL);//#函数NextFrameH264,更新frame
if (nFrameLeng < 4)
break;
// Post processing
// pp_param.SrcFrmSt에는 MFC의 output buffer의 physical address가
// pp_param.DstFrmSt에는 LCD frame buffer의 physical address가 입력으로 넣어야 한다.
pp_param.src_buf_addr_phy = pYUVBuf[0]; // MFC output buffer,将输出buffer的地址传递给pp的配置结构体pp_param
ioctl(pp_fd, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param);
ioctl(fb_fd, FBIOGET_FSCREENINFO, &lcd_info);
pp_param.dst_buf_addr_phy = lcd_info.smem_start; // LCD frame buffer
ioctl(pp_fd, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param);
ioctl(pp_fd, S3C_PP_START);//调用ioctl设置pp,后处理并显示
//pp参数设置完毕,启动pp
#ifdef FPS
gettimeofday(&stop, NULL);
time += measureTime(&start, &stop);
frame_cnt++;
mod_cnt++;
if (mod_cnt == 50) {
printf("Average FPS : %u\n", (float)mod_cnt*1000/time);
mod_cnt = 0;
time = 0;
}
#endif
//下一帧解码循环
}
#ifdef FPS
printf("Display Time : %u, Frame Count : %d, FPS : %f\n", time, frame_cnt, (float)frame_cnt*1000/time);
#endif
ioctl(fb_fd, SET_OSD_STOP);
SsbSipH264DecodeDeInit(handle);
munmap(in_addr, file_size);
munmap(fb_addr, fb_size);
close(pp_fd);
close(fb_fd);
close(in_fd);/*
解码结束
deinit decode;
munmap;
close pp fb.*/
return 0;
}
// Render the interactive top-level menu and the selection prompt.
static void print_menu(void)
{
    static const char *const kMenuLines[] = {
        "========= S3C6400/6410 Demo Application ==========\n",
        "= =\n",
        "= 1. Record =\n",
        "= 2. Playback =\n",
        "= 3. Exit =\n",
        "= =\n",
        "==================================================\n",
        "Select number --> ",   // no trailing newline: cursor stays on the prompt
    };
    for (unsigned i = 0; i < sizeof(kMenuLines) / sizeof(kMenuLines[0]); i++) {
        fputs(kMenuLines[i], stdout);
    }
}
static void FBOpen(); //打开lcd设备节点,并获取参数
// Entry point: query the LCD geometry, show the menu, then either record
// roughly 10 seconds of camera video as H.264 (1), play it back on the
// LCD (2), or exit (anything else).
int main(int argc, char **argv)
{
    int num = 0;
    FBOpen();
    system("clear");
    print_menu();
    if (scanf("%d", &num) != 1) { // BUG FIX: num was read uninitialized when scanf failed
        num = 0;                  // treat unparsable input like "exit"
    }
    // BUG FIX: fflush(stdin) is undefined behavior; drain the rest of the line instead.
    int ch;
    while ((ch = getchar()) != '\n' && ch != EOF) {
    }
    if (num == 1) {
        try {
            struct timeval start, end;
            TFrameBuffer FrameBuffer;
            TVideo Video;
            int timeuse = 0;
            int oldTimeUse = 0;
            TH264Encoder Encoder;
            gettimeofday(&start, NULL);
            // Capture/encode/preview loop, runs for a bit over 10 seconds.
            for (;;) {
                Video.FetchPicture();        // grab one RGB565 frame
                Encoder.Encode(Video);       // H.264-encode and append to file
                FrameBuffer.DrawRect(Video); // live preview on the LCD
                gettimeofday(&end, NULL);
                timeuse = 1000000 * (end.tv_sec - start.tv_sec) + end.tv_usec - start.tv_usec;
                timeuse /= 1000000;          // elapsed whole seconds
                if (oldTimeUse != timeuse) {
                    printf(".\n");           // one dot per second of recording
                    oldTimeUse = timeuse;
                }
                if (timeuse > 10) {
                    break;
                }
            }
            printf("\nDone!\n");
        } catch (TError &e) {
            e.Output();
            return 1;
        }
    } else if (num == 2) {
        playback();
    } else {
        exit(0);
    }
    return 0;
}
// Fallback when "/dev/camera" is missing: open a V4L2 overlay-capable
// device ("/dev/video0"; try video1/video2 for other cameras), select its
// first camera-type input and start overlay preview at Width x Height in
// RGB565. Throws TError if any step fails.
void TVideo::TryAnotherCamera()
{
    int ret, start, found;
    struct v4l2_input chan;
    struct v4l2_framebuffer preview;
    fd = ::open("/dev/video0", O_RDWR);
    if (fd < 0) {
        throw TError("cannot open video device");
    }
    /* Get capability */
    struct v4l2_capability cap;
    ret = ::ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (ret < 0) {
        throw TError("not available device");
    }
    /* The preview path requires overlay support. */
    if (!(cap.capabilities & V4L2_CAP_VIDEO_OVERLAY)) {
        throw TError("not available device");
    }
    /* Enumerate inputs until the first camera-type input is found
       (VIDIOC_ENUMINPUT fails past the last input, which throws). */
    memset(&chan, 0, sizeof chan);
    chan.index = 0;
    found = 0;
    while (1) {
        ret = ::ioctl(fd, VIDIOC_ENUMINPUT, &chan);
        if (ret < 0) {
            throw TError("not available device");
        }
        if (chan.type & V4L2_INPUT_TYPE_CAMERA) {
            found = 1;
            break;
        }
        chan.index++;
    }
    if (!found) {
        throw TError("not available device");
    }
    /* BUG FIX: VIDIOC_S_INPUT takes a pointer to the input index (int *),
       not a struct v4l2_input *. Passing &chan only worked by accident
       because index is the struct's first member. */
    ret = ::ioctl(fd, VIDIOC_S_INPUT, &chan.index);
    if (ret < 0) {
        throw TError("not available device");
    }
    /* Set up for preview */
    memset(&preview, 0, sizeof preview);
    preview.fmt.width = Width;
    preview.fmt.height = Height;
    preview.fmt.pixelformat = V4L2_PIX_FMT_RGB565;
    preview.capability = 0;
    preview.flags = 0;
    ret = ioctl(fd, VIDIOC_S_FBUF, &preview);
    if (ret < 0) {
        throw TError("not available device");
    }
    /* Preview start */
    start = 1;
    ret = ioctl(fd, VIDIOC_OVERLAY, &start);
    if (ret < 0) {
        throw TError("not available device");
    }
}
/***************** MFC driver function *****************/
void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num)
//首先要注意的是传入的参数,它们分别定义了每一帧图像的长宽,帧的速度,比特率,GOP(Group of Pictures)策略影响编码质量(设置编码的质量系数)。 函数的作用是对整个MFC的参数进行设置
{
int frame_size;
void *handle;
int ret;
frame_size = (width * height * 3) >> 1;//这里有一个frame_size,有人问为什么要定义成那么大,我们需要的图像每一帧的大小是我们可以自己定义的,我们在代码执行前一般都会开一个缓冲区来存放每一帧的数据,由于我们开的缓冲区给每帧的大小就是那么大,所以这里也好配合我们之前开辟的缓冲区大小进行编码。是这么大吗?
handle = SsbSipH264EncodeInit(width, height, frame_rate, bitrate, gop_num);//这个函数可以说是真正的开始进入编码的初始化过程,现在让我们进去看看。见文件MFC_API/SsbSipH264Encode.c,里面有中文注释。
if (handle == NULL) {
LOG_MSG(LOG_ERROR, "Test_Encoder", "SsbSipH264EncodeInit Failed\n");
return NULL;
}
ret = SsbSipH264EncodeExe(handle);
/*
总结一下我们刚才经过的步骤:
1. 打开设备节点
2. 进行内存到应用的内存映射
3. 初始化关于MFC设备的机构体,并且提供相应的参数
4. 把_MFCLIB_H264_ENC参数传入MFC跟深层次的结构体当中
5. 通过ioctl函数把这些参数传入到内核当中*/
return handle;
}
void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size) //进行真正的【对应H264】编码工作
{
unsigned char *p_inbuf, *p_outbuf;
int hdr_size;
int ret;
/* 下面将有这些函数
SsbSipH264EncodeGetInBuf(handle, 0);
SsbSipH264EncodeExe(handle);
SsbSipH264EncodeGetConfig(handle, H264_ENC_GETCONF_HEADER_SIZE, &hdr_size);
SsbSipH264EncodeGetOutBuf(handle, size);
看以上这几个函数,其作用通过读函数名字我想就已经非常清楚了,步骤如下:
1. 首先得到输入图像的地址buffer
2. 然后进行编码
3. 第一次的编码需要传入配置参数
4. 得到输出的经过编码的图像的地址(通过内部结构体传递)和大小
*/
p_inbuf = (unsigned char*)SsbSipH264EncodeGetInBuf(handle, 0);
memcpy(p_inbuf, yuv_buf, frame_size);
ret = SsbSipH264EncodeExe(handle);
if (first_frame) {
SsbSipH264EncodeGetConfig(handle, H264_ENC_GETCONF_HEADER_SIZE, &hdr_size);
printf("Header Size : %d\n", hdr_size);//yjx delete注释
}
p_outbuf = (unsigned char*)SsbSipH264EncodeGetOutBuf(handle, size);
return p_outbuf;
}
// Release an MFC encoder handle obtained from mfc_encoder_init().
void mfc_encoder_free(void *handle)
{
SsbSipH264EncodeDeInit(handle);// internally munmap(pCTX->mapped_addr, BUF_SIZE) and closes the device node -- i.e. undoes the mapping made at init
}
#include <fcntl.h>
#include <linux/fb.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <math.h>
int FriendlyARMWidth, FriendlyARMHeight;
// Open the primary LCD frame buffer ("/dev/fb0") and record its pixel
// resolution into FriendlyARMWidth/FriendlyARMHeight (used elsewhere as
// FB0_WIDTH/FB0_HEIGHT). Exits the process on failure.
static void FBOpen(void)
{
    struct fb_fix_screeninfo FBFix;
    struct fb_var_screeninfo FBVar;
    int FBHandle = open("/dev/fb0", O_RDWR);
    if (FBHandle < 0) { // BUG FIX: the open() result was passed to ioctl() unchecked
        fprintf(stderr, "Cannot open /dev/fb0\n");
        exit(1);
    }
    if (ioctl(FBHandle, FBIOGET_FSCREENINFO, &FBFix) == -1 ||
        ioctl(FBHandle, FBIOGET_VSCREENINFO, &FBVar) == -1) {
        close(FBHandle);
        fprintf(stderr, "Cannot get Frame Buffer information\n"); // BUG FIX: missing newline
        exit(1);
    }
    FriendlyARMWidth = FBVar.xres;
    FriendlyARMHeight = FBVar.yres;
    close(FBHandle);
}
// 还有一个文件不得不提,就是MFC_API下的SsbSipH264Encode.c,因为MFC电路调用了这些关于H264的函数,所以需要分析
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <ctype.h>
#include <unistd.h>
#include <sys/mman.h>
#include <string.h>
#include <errno.h>
#include <sys/time.h>
#include "MfcDriver.h"
#include "MfcDrvParams.h"
#include "SsbSipH264Encode.h"
#include "LogMsg.h"
#define _MFCLIB_H264_ENC_MAGIC_NUMBER 0x92242002
// Per-instance context for the H.264 hardware encoder. Filled in by
// SsbSipH264EncodeInit() and handed to callers as an opaque void *.
// Do not reorder fields: the functions below rely on this layout.
typedef struct
{
int magic;                        // _MFCLIB_H264_ENC_MAGIC_NUMBER sanity tag
int hOpen;                        // fd of the open MFC device node
int fInit;                        // nonzero once IOCTL_MFC_H264_ENC_INIT has succeeded
int enc_strm_size;                // size of the last encoded stream chunk
int enc_hdr_size;                 // stream header size reported by the driver
unsigned int width, height;       // frame geometry in pixels
unsigned int framerate, bitrate;  // fps and target bitrate (kbps)
unsigned int gop_num;             // GOP length
unsigned char *mapped_addr;       // mmap()ed buffer shared with the driver
} _MFCLIB_H264_ENC;
// Encoder parameter bundle mirroring the driver's init arguments.
// NOTE(review): not referenced anywhere in this file -- presumably kept
// to document the driver ABI; confirm before removing.
typedef struct {
int width;         // frame width in pixels
int height;        // frame height in pixels
int frameRateRes;  // frame rate numerator (resolution)
int frameRateDiv;  // frame rate divisor
int gopNum;        // GOP length
int bitrate;       // target bitrate
} enc_info_t;
// Create an H.264 encoder context: open the MFC device node, mmap the
// in/out buffer shared between this process and the MFC driver, and record
// the encoding parameters. The in-kernel encoder itself is initialized
// lazily by the first SsbSipH264EncodeExe() call.
// Returns an opaque _MFCLIB_H264_ENC * (as void *), or NULL on failure.
void *SsbSipH264EncodeInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFramerate, unsigned int uiBitrate_kbps, unsigned int uiGOPNum)
{
    _MFCLIB_H264_ENC *pCTX;
    int hOpen;
    unsigned char *addr;
    // Open the MFC device node.
    hOpen = open(MFC_DEV_NAME, O_RDWR|O_NDELAY);
    if (hOpen < 0) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeInit", "MFC Open failure.\n");
        return NULL;
    }
    // Map the shared in/out buffer: raw frames are written into it and the
    // encoded bitstream is read back from it, so plain reads/writes on this
    // mapping reach the driver's buffer directly.
    addr = (unsigned char *)mmap(0, BUF_SIZE, PROT_READ | PROT_WRITE, MAP_SHARED, hOpen, 0);
    if (addr == (unsigned char *)MAP_FAILED) { // BUG FIX: mmap returns MAP_FAILED, not NULL
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeInit", "MFC Mmap failure.\n");
        close(hOpen); // BUG FIX: the device fd was leaked on this path
        return NULL;
    }
    pCTX = (_MFCLIB_H264_ENC *) malloc(sizeof(_MFCLIB_H264_ENC));
    if (pCTX == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeInit", "malloc failed.\n");
        munmap(addr, BUF_SIZE); // BUG FIX: the mapping was leaked on this path
        close(hOpen);
        return NULL;
    }
    // Fill the context; it is returned to callers as an opaque handle.
    memset(pCTX, 0, sizeof(_MFCLIB_H264_ENC));
    pCTX->magic = _MFCLIB_H264_ENC_MAGIC_NUMBER;
    pCTX->hOpen = hOpen;
    pCTX->fInit = 0;
    pCTX->mapped_addr = addr;
    pCTX->width = uiWidth;
    pCTX->height = uiHeight;
    pCTX->framerate = uiFramerate;
    pCTX->bitrate = uiBitrate_kbps;
    pCTX->gop_num = uiGOPNum;
    pCTX->enc_strm_size = 0;
    return (void *) pCTX;
}
// Drive the encoder. On the first call this performs the in-kernel
// initialization (IOCTL_MFC_H264_ENC_INIT) with the parameters stored in
// the context; on later calls it encodes the frame currently in the shared
// input buffer (IOCTL_MFC_H264_ENC_EXE) and records the resulting stream
// and header sizes in the context. Returns SSBSIP_H264_ENC_RET_OK or an
// error code.
int SsbSipH264EncodeExe(void *openHandle)
{
    _MFCLIB_H264_ENC *pCTX;
    int r;
    MFC_ARGS mfc_args;
    // Input parameter checking.
    if (openHandle == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeExe", "openHandle is NULL\n");
        return SSBSIP_H264_ENC_RET_ERR_INVALID_HANDLE;
    }
    pCTX = (_MFCLIB_H264_ENC *) openHandle;
    if (!pCTX->fInit) {
        // First call: hand the encoding parameters to the driver.
        mfc_args.enc_init.in_width = pCTX->width;
        mfc_args.enc_init.in_height = pCTX->height;
        mfc_args.enc_init.in_bitrate = pCTX->bitrate;
        mfc_args.enc_init.in_gopNum = pCTX->gop_num;
        mfc_args.enc_init.in_frameRateRes = pCTX->framerate;
        mfc_args.enc_init.in_frameRateDiv = 0;
        // (DeviceIoControl) IOCTL_MFC_H264_ENC_INIT
        r = ioctl(pCTX->hOpen, IOCTL_MFC_H264_ENC_INIT, &mfc_args);
        if ((r < 0) || (mfc_args.enc_init.ret_code < 0)) {
            LOG_MSG(LOG_ERROR, "SsbSipH264EncodeInit", "IOCTL_MFC_H264_ENC_INIT failed.\n");
            return SSBSIP_H264_ENC_RET_ERR_ENCODE_FAIL;
        }
        pCTX->fInit = 1;
        return SSBSIP_H264_ENC_RET_OK;
    }
    // (DeviceIoControl) IOCTL_MFC_H264_ENC_EXE
    r = ioctl(pCTX->hOpen, IOCTL_MFC_H264_ENC_EXE, &mfc_args);
    if ((r < 0) || (mfc_args.enc_exe.ret_code < 0)) {
        return SSBSIP_H264_ENC_RET_ERR_ENCODE_FAIL;
    }
    // Encoded stream size is saved; it is returned to the caller later by
    // SsbSipH264EncodeGetOutBuf().
    pCTX->enc_strm_size = mfc_args.enc_exe.out_encoded_size;
    if (mfc_args.enc_exe.out_header_size > 0) {
        pCTX->enc_hdr_size = mfc_args.enc_exe.out_header_size;
        LOG_MSG(LOG_TRACE, "SsbSipH264EncodeExe", "HEADER SIZE = %d\n", pCTX->enc_hdr_size);
    }
    return SSBSIP_H264_ENC_RET_OK;
}
int SsbSipH264EncodeDeInit(void *openHandle)
{
_MFCLIB_H264_ENC *pCTX;
// Input Parameter Checking //
if (openHandle == NULL) {
LOG_MSG(LOG_ERROR, "SsbSipH264EncodeDeInit", "openHandle is NULL\n");
return SSBSIP_H264_ENC_RET_ERR_INVALID_HANDLE;
}
pCTX = (_MFCLIB_H264_ENC *) openHandle;
munmap(pCTX->mapped_addr, BUF_SIZE);
close(pCTX->hOpen);
return SSBSIP_H264_ENC_RET_OK;
}
// Return the address of the shared frame input buffer the caller should
// copy one raw YUV frame into before SsbSipH264EncodeExe().
// size must be in [0, 0x100000]. Returns NULL on failure.
void *SsbSipH264EncodeGetInBuf(void *openHandle, long size)
{
    _MFCLIB_H264_ENC *pCTX;
    int r;
    MFC_ARGS mfc_args;
    // Input parameter checking.
    if (openHandle == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetInBuf", "openHandle is NULL\n");
        return NULL;
    }
    if ((size < 0) || (size > 0x100000)) {
        // BUG FIX: size is long, so "%d" was the wrong conversion.
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetInBuf", "size is invalid. (size=%ld)\n", size);
        return NULL;
    }
    pCTX = (_MFCLIB_H264_ENC *) openHandle;
    // (DeviceIoControl) IOCTL_MFC_GET_FRAM_BUF_ADDR: the driver translates
    // our mapped address into the frame-buffer address inside the mapping.
    mfc_args.get_buf_addr.in_usr_data = (int)pCTX->mapped_addr;
    r = ioctl(pCTX->hOpen, IOCTL_MFC_GET_FRAM_BUF_ADDR, &mfc_args);
    if ((r < 0) || (mfc_args.get_buf_addr.ret_code < 0)) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetInBuf", "Failed in get FRAM_BUF address\n");
        return NULL;
    }
    return (void *)mfc_args.get_buf_addr.out_buf_addr;
}
// Return the address of the encoded bitstream produced by the last
// SsbSipH264EncodeExe() call and store its length in *size.
// Returns NULL on failure (*size is left untouched in that case).
void *SsbSipH264EncodeGetOutBuf(void *openHandle, long *size)
{
    _MFCLIB_H264_ENC *pCTX;
    int r;
    MFC_ARGS mfc_args;
    // Input parameter checking.
    if (openHandle == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetOutBuf", "openHandle is NULL\n");
        return NULL;
    }
    if (size == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetOutBuf", "size is NULL.\n");
        return NULL;
    }
    pCTX = (_MFCLIB_H264_ENC *) openHandle;
    // (DeviceIoControl) IOCTL_MFC_GET_LINE_BUF_ADDR: translate our mapped
    // address into the stream (line) buffer address inside the mapping.
    mfc_args.get_buf_addr.in_usr_data = (int)pCTX->mapped_addr;
    r = ioctl(pCTX->hOpen, IOCTL_MFC_GET_LINE_BUF_ADDR, &mfc_args);
    if ((r < 0) || (mfc_args.get_buf_addr.ret_code < 0)) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeGetOutBuf", "Failed in get LINE_BUF address.\n");
        return NULL;
    }
    // Length recorded by the last encode pass.
    *size = pCTX->enc_strm_size;
    return (void *)mfc_args.get_buf_addr.out_buf_addr;
}
// Push an encoder configuration change to the driver via IOCTL_MFC_SET_CONFIG.
// conf_type selects the parameter group; value points to two unsigned ints
// whose meaning depends on conf_type. Returns SSBSIP_H264_ENC_RET_OK or an
// error code.
int SsbSipH264EncodeSetConfig(void *openHandle, H264_ENC_CONF conf_type, void *value)
{
    _MFCLIB_H264_ENC *pCTX;
    MFC_ARGS mfc_args;
    int r;
    unsigned int num_slices[2];
    // Input parameter checking.
    if (openHandle == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "openHandle is NULL\n");
        return SSBSIP_H264_ENC_RET_ERR_INVALID_HANDLE;
    }
    if (value == NULL) {
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "value is NULL\n");
        return SSBSIP_H264_ENC_RET_ERR_INVALID_PARAM;
    }
    pCTX = (_MFCLIB_H264_ENC *) openHandle;
    switch (conf_type) {
    case H264_ENC_SETCONF_NUM_SLICES:
        num_slices[0] = ((unsigned int *)value)[0];
        num_slices[1] = ((unsigned int *)value)[1];
        printf("num slices[0] = %u\n", num_slices[0]); // BUG FIX: %d with an unsigned argument
        printf("num slices[1] = %u\n", num_slices[1]); // BUG FIX: %d with an unsigned argument
        mfc_args.set_config.in_config_param = MFC_SET_CONFIG_ENC_SLICE_MODE;
        mfc_args.set_config.in_config_value[0] = num_slices[0];
        mfc_args.set_config.in_config_value[1] = num_slices[1];
        r = ioctl(pCTX->hOpen, IOCTL_MFC_SET_CONFIG, &mfc_args);
        if ((r < 0) || (mfc_args.set_config.ret_code < 0)) {
            LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "Error in H264_ENC_SETCONF_NUM_SLICES.\n");
            return SSBSIP_H264_ENC_RET_ERR_SETCONF_FAIL;
        }
        break;
    case H264_ENC_SETCONF_PARAM_CHANGE:
        mfc_args.set_config.in_config_param = MFC_SET_CONFIG_ENC_PARAM_CHANGE;
        mfc_args.set_config.in_config_value[0] = ((unsigned int *) value)[0];
        mfc_args.set_config.in_config_value[1] = ((unsigned int *) value)[1];
        r = ioctl(pCTX->hOpen, IOCTL_MFC_SET_CONFIG, &mfc_args);
        if ((r < 0) || (mfc_args.set_config.ret_code < 0)) {
            LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "Error in H264_ENC_SETCONF_PARA_CHANGE.\n");
            return SSBSIP_H264_ENC_RET_ERR_SETCONF_FAIL;
        }
        break;
    case H264_ENC_SETCONF_CUR_PIC_OPT:
        mfc_args.set_config.in_config_param = MFC_SET_CONFIG_ENC_CUR_PIC_OPT;
        mfc_args.set_config.in_config_value[0] = ((unsigned int *) value)[0];
        mfc_args.set_config.in_config_value[1] = ((unsigned int *) value)[1];
        r = ioctl(pCTX->hOpen, IOCTL_MFC_SET_CONFIG, &mfc_args);
        if ((r < 0) || (mfc_args.set_config.ret_code < 0)) {
            LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "Error in H264_ENC_SETCONF_CUR_PIC_OPT.\n");
            return SSBSIP_H264_ENC_RET_ERR_SETCONF_FAIL;
        }
        break;
    default:
        LOG_MSG(LOG_ERROR, "SsbSipH264EncodeSetConfig", "No such conf_type is supported.\n");
        return SSBSIP_H264_ENC_RET_ERR_SETCONF_FAIL;
    }
    return SSBSIP_H264_ENC_RET_OK;
}
int SsbSipH264EncodeGetConfig(void *openHandle, H264_ENC_CONF conf_type, void *value)
{
_MFCLIB_H264_ENC *pCTX;
// Input Parameter Checking //
if (openHandle == NULL) {
return -1;
}
pCTX = (_MFCLIB_H264_ENC *) openHandle;
switch (conf_type) {
case H264_ENC_GETCONF_HEADER_SIZE:
*((int *)value) = pCTX->enc_hdr_size;
break;
default:
break;
}
return SSBSIP_H264_ENC_RET_OK;
}
// 如果需要,可以去我的资源找到整个文件夹的下载链接!