A V4L2-Based Camera Application

/******************************************************

Author: zhaoym
Date: 18/03/2023

******************************************************/


#include "vicam.h"

int camera_dbg_en = 0;

static camera_handle camera;
static char camera_path[16];

static struct v4l2_capability cap;      /* Query device capabilities */
static struct v4l2_fmtdesc fmtdesc;     /* Enumerate image formats */
static struct v4l2_frmsizeenum frmsize; /* Enumerate frame sizes */
static struct v4l2_frmivalenum ival;	/* Enumerate fps */
static struct v4l2_format fmt;          /* try a format */
static struct v4l2_input inp;           /* select the current video input */
static struct v4l2_streamparm parms;    /* set streaming parameters */
static struct v4l2_requestbuffers req;  /* Initiate Memory Mapping or User Pointer I/O */
static struct v4l2_buffer buf;          /* Query the status of a buffer */

static int is_streaming = 0;

#define NUM_SUPPORT_FMT 36
static int formatNum = 0;
static struct vfe_format support_fmt[NUM_SUPPORT_FMT];
static struct vfe_format sensor_formats[NUM_SUPPORT_FMT];

int low_vi_get_frame(vi_frame_t *frame)
{
	struct timeval tv;
	fd_set fds;
	int ret = 0;
	char source_data_path[128];
	FILE *fp = NULL;

	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		frame->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		frame->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	frame->buf.memory = camera.memory;
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		frame->buf.length = camera.nplanes;
		frame->buf.m.planes = (struct v4l2_plane *)calloc(camera.nplanes, sizeof(struct v4l2_plane));
	}

	camera.sensorGain = getSensorGain(camera.videofd);
	camera.sensorExp = getSensorExp(camera.videofd);
	/* wait for sensor capture data */
	tv.tv_sec = 2;
	tv.tv_usec = 0;

	FD_ZERO(&fds);
	FD_SET(camera.videofd, &fds);
	ret = select(camera.videofd + 1, &fds, NULL, NULL, &tv);
	if (ret == -1) {
		camera_err(" select error\n");
		return ret;
	} else if (ret == 0) {
		camera_err(" camera%d select timeout,end capture thread!\n", camera.camera_index);
		ret = -1;
		return ret;
	}

	/* dqbuf */
	ret = ioctl(camera.videofd, VIDIOC_DQBUF, &frame->buf);
	if (ret == 0)
		camera_dbg("*****DQBUF[%d] FINISH*****\n", frame->buf.index);
	else
		camera_err("****DQBUF FAIL*****\n");
#if 0
	sprintf(source_data_path, "%s/source_%s_%d.yuv", camera.save_path, get_format_name(camera.pixelformat), np + 1);
	save_frame_to_file(source_data_path, camera.buffers[frame->buf.index].start[0], camera.buffers[frame->buf.index].length[0]);
#endif
	/* hand the mapped buffer of this frame back to the caller (no copy is made) */
	frame->buffers.start[0] = camera.buffers[frame->buf.index].start[0];
	frame->buffers.length[0] = camera.buffers[frame->buf.index].length[0];

	/* qbuf: hand the buffer back to the driver for the next capture */
	if (ioctl(camera.videofd, VIDIOC_QBUF, &frame->buf) == 0)
		camera_dbg("************QBUF[%d] FINISH**************\n", frame->buf.index);
	else
		camera_err("*****QBUF FAIL*****\n");

	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		/* the planes array allocated for DQBUF is no longer needed */
		free(frame->buf.m.planes);
		frame->buf.m.planes = NULL;
	}

	camera_dbg(" capture one frame finished\n");

	return ret;
}

int32_t low_vi_free_buf()
{
	return 0;
}

void vi_camera_config()
{
	memset(&camera, 0, sizeof(camera_handle));
	memset(camera_path, 0, sizeof(camera_path));
	
	//camera config
	camera.camera_index = 0;
	camera.isDefault = true;
	camera.sensor_type = -1;
	camera.ispId = -1;
	camera.photo_type = YUV_TYPE;
	camera.win_width = 640;
	camera.win_height = 480;
	camera.fps = 60;
	camera.photo_num = 5;
	camera.pixelformat = V4L2_PIX_FMT_NV21;
	memcpy(camera.save_path, "/tmp", sizeof("/tmp"));
	camera.ToRGB24 = NV21ToRGB24;
	camera.memory = V4L2_MEMORY_MMAP;
	
	//video node index
	sprintf(camera_path, "%s%d", "/dev/video", camera.camera_index);
}

int32_t vi_querycap()
{
	memset(&cap, 0, sizeof(cap));
	if (ioctl(camera.videofd, VIDIOC_QUERYCAP, &cap) < 0) {
		camera_err(" Query device capabilities fail!!!\n");
		return -1;
	}

	camera_dbg(" Query device capabilities succeed\n");
	camera_dbg(" cap.driver=%s\n", cap.driver);
	camera_dbg(" cap.card=%s\n", cap.card);
	camera_dbg(" cap.bus_info=%s\n", cap.bus_info);
	camera_dbg(" cap.version=0x%08x\n", cap.version);
	camera_dbg(" cap.capabilities=0x%08x\n", cap.capabilities);

	if ((cap.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) == 0) {
		camera_err(" The device does not support the Video Capture interface!!!\n");
		return -1;
	}
	
	if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		camera.driver_type = V4L2_CAP_VIDEO_CAPTURE_MPLANE;
	} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
		camera.driver_type = V4L2_CAP_VIDEO_CAPTURE;
	} else {
		camera_err(" %s is not a capture device.\n", camera_path);
		return -1;
	}
	
	return 0;
}

int32_t vi_enum_fmt()
{
	int index = 0;
	
	//enum all supported format
	camera_dbg(" enumerate image formats\n");
	memset(support_fmt, 0, sizeof(support_fmt));
	fmtdesc.index = 0;
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	while (index < NUM_SUPPORT_FMT && ioctl(camera.videofd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
		/* save image formats */
		if (get_format_name(fmtdesc.pixelformat) != NULL) {
			memcpy(support_fmt[index].name, fmtdesc.description, sizeof(fmtdesc.description));
			support_fmt[index].fourcc = fmtdesc.pixelformat;

			camera_dbg(" format index = %d, name = %s\n", index, get_format_name(fmtdesc.pixelformat));
			index++;
		}
		fmtdesc.index++;
	}
	
	//try every enumerated format; the exact resolution is not important here
	int enumFmtIndex = 0;
	formatNum = 0;
	memset(&fmt, 0, sizeof(fmt));
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		fmt.fmt.pix_mp.width = camera.win_width;
		fmt.fmt.pix_mp.height = camera.win_height;
		fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
	} else {
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		fmt.fmt.pix.width = camera.win_width;
		fmt.fmt.pix.height = camera.win_height;
		fmt.fmt.pix.field = V4L2_FIELD_NONE;
	}
	memset(sensor_formats, 0, sizeof(sensor_formats));

	camera_print("*********************************************************\n");
	camera_print(" The sensor supports the following formats :\n");
	while (enumFmtIndex < NUM_SUPPORT_FMT && support_fmt[enumFmtIndex].fourcc != 0) {
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
			fmt.fmt.pix_mp.pixelformat = support_fmt[enumFmtIndex].fourcc;
		else
			fmt.fmt.pix.pixelformat = support_fmt[enumFmtIndex].fourcc;

		if (ioctl(camera.videofd, VIDIOC_TRY_FMT, &fmt) == 0) {
			if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
				sensor_formats[formatNum].fourcc = fmt.fmt.pix_mp.pixelformat;
			else
				sensor_formats[formatNum].fourcc = fmt.fmt.pix.pixelformat;
			camera_print(" Index %d : %s.\n", formatNum, get_format_name(sensor_formats[formatNum].fourcc));
			formatNum++;
		}
		enumFmtIndex++;
	}
	
	return 0;
}

int32_t vi_check_fmt()
{
	int enumFmtIndex = 0;
	while (enumFmtIndex < formatNum && sensor_formats[enumFmtIndex].fourcc != 0) {
		memset(&frmsize, 0, sizeof(frmsize));
		frmsize.index = 0;
		frmsize.pixel_format = sensor_formats[enumFmtIndex].fourcc;
		camera_print("**********************************************************\n");
		camera_print(" The %s supports the following resolutions:\n", get_format_name(sensor_formats[enumFmtIndex].fourcc));
		while (ioctl(camera.videofd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
			if (frmsize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
				sensor_formats[enumFmtIndex].size[frmsize.index].width = frmsize.stepwise.max_width;
				sensor_formats[enumFmtIndex].size[frmsize.index].height = frmsize.stepwise.max_height;

				camera_print(" Index %d : %u * %u\n", frmsize.index, frmsize.stepwise.max_width, frmsize.stepwise.max_height);
			} else {
				sensor_formats[enumFmtIndex].size[frmsize.index].width = frmsize.discrete.width;
				sensor_formats[enumFmtIndex].size[frmsize.index].height = frmsize.discrete.height;

				camera_print(" Index %d : %u * %u\n", frmsize.index, frmsize.discrete.width, frmsize.discrete.height);
			}

			memset(&ival, 0, sizeof(struct v4l2_frmivalenum));
			ival.index = 0;
			ival.pixel_format = frmsize.pixel_format;
			ival.width = sensor_formats[enumFmtIndex].size[frmsize.index].width;
			ival.height = sensor_formats[enumFmtIndex].size[frmsize.index].height;

			if (ioctl(camera.videofd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
				goto enum_frameintervals_failed;

			if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
				do {
					camera_print("	adding discrete framerate:: %d/%d\n",
						   ival.discrete.denominator, ival.discrete.numerator);
					ival.index++;
				} while (ioctl(camera.videofd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
			} else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
				camera_print("	continuous frame interval %d/%d to %d/%d\n",
					   ival.stepwise.max.denominator, ival.stepwise.max.numerator,
					   ival.stepwise.min.denominator, ival.stepwise.min.numerator);
			}

enum_frameintervals_failed:
			frmsize.index++;
			if (frmsize.index > N_WIN_SIZES - 1) {
				camera_err(" Beyond the maximum queryable range, please modify N_WIN_SIZES.\n");
				break;
			}
		}
		/* next format */
		enumFmtIndex++;
	}

	int i;
	camera_print("**********************************************************\n");
	camera_print(" Using format parameters %s.\n", get_format_name(camera.pixelformat));
	for (i = 0; sensor_formats[i].fourcc != 0; i++)
		if (camera.pixelformat == sensor_formats[i].fourcc)
			break;

	/* check whether the requested pixelformat is supported */
	if (sensor_formats[i].fourcc == 0) {
		camera_err(" sensor does not support %s\n", get_format_name(camera.pixelformat));
		camera_print(" Fall back to the first supported format and resolution\n");
		camera.pixelformat = sensor_formats[0].fourcc;
		camera.win_width = sensor_formats[0].size[0].width;
		camera.win_height = sensor_formats[0].size[0].height;

		/*find YUVToRGB func*/
		camera.ToRGB24 = NULL;
		for (int j = 0; j < FOURCC_NUM; j++)
			if (camera.pixelformat == fmt_fourcc[j].fourcc)
				camera.ToRGB24 = fmt_fourcc[j].ToRGB24Func;
	} else {
		int j, sizeindex;

		for (j = 0; sensor_formats[j].fourcc != 0; j++)
			if (camera.pixelformat == sensor_formats[j].fourcc)
				break;

		for (sizeindex = 0; sensor_formats[j].size[sizeindex].width != 0; sizeindex++)
			if (camera.win_width == sensor_formats[j].size[sizeindex].width && camera.win_height == sensor_formats[j].size[sizeindex].height)
				break;

		if (sensor_formats[j].size[sizeindex].width == 0) {
			camera_err(" sensor not support %u * %u\n", camera.win_width, camera.win_height);
			camera_print(" use support for the first resolution\n");
			if (sensor_formats[j].size[0].width != 0 && sensor_formats[j].size[0].height != 0) {
				camera.win_width = sensor_formats[j].size[0].width;
				camera.win_height = sensor_formats[j].size[0].height;
			} else {
				/* no resolution reported, force a safe default */
				camera.win_width = 640;
				camera.win_height = 480;
			}
		}
	}

	camera_dbg(" camera pixelformat: %s\n", get_format_name(camera.pixelformat));
	camera_dbg(" Resolution size : %u * %u\n", camera.win_width, camera.win_height);
	camera_dbg(" The photo save path is %s.\n", camera.save_path);
	camera_dbg(" The number of photos taken is %d.\n", camera.photo_num);

	return 0;
}

int32_t vi_s_input()
{
	memset(&inp, 0, sizeof(inp));
	inp.index = 0;
	inp.type = V4L2_INPUT_TYPE_CAMERA;
	if (ioctl(camera.videofd, VIDIOC_S_INPUT, &inp) < 0) {
		camera_err(" VIDIOC_S_INPUT failed! s_input: %d\n", inp.index);
		return -1;
	}
	
	return 0;
}

int32_t vi_create_ispapi()
{
	/* detect sensor type */
	camera.sensor_type = getSensorType(camera.videofd);
	if (camera.sensor_type == V4L2_SENSOR_TYPE_RAW) {
		camera.ispPort = CreateAWIspApi();
		camera_print(" raw sensor use vin isp\n");
		camera_print(" create isp success\n");
	}
	
	return 0;
}

int32_t vi_s_parm()
{
	memset(&parms, 0, sizeof(struct v4l2_streamparm));
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	parms.parm.capture.timeperframe.numerator = 1;
	parms.parm.capture.timeperframe.denominator = camera.fps;
	if (ioctl(camera.videofd, VIDIOC_S_PARM, &parms) < 0) {
		camera_err(" Setting streaming parameters failed, numerator:%d denominator:%d\n",
			   parms.parm.capture.timeperframe.numerator,
			   parms.parm.capture.timeperframe.denominator);
		close(camera.videofd);
		return -1;
	}
	
	return 0;
}

int32_t vi_s_fmt()
{
	memset(&fmt, 0, sizeof(struct v4l2_format));
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		fmt.fmt.pix_mp.width = camera.win_width;
		fmt.fmt.pix_mp.height = camera.win_height;
		fmt.fmt.pix_mp.pixelformat = camera.pixelformat;
		fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
	} else {
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		fmt.fmt.pix.width = camera.win_width;
		fmt.fmt.pix.height = camera.win_height;
		fmt.fmt.pix.pixelformat = camera.pixelformat;
		fmt.fmt.pix.field = V4L2_FIELD_NONE;
	}
	
	if (ioctl(camera.videofd, VIDIOC_S_FMT, &fmt) < 0) {
		camera_err(" setting the data format failed!\n");
		close(camera.videofd);
		return -1;
	}
	
	if (ioctl(camera.videofd, VIDIOC_G_FMT, &fmt) < 0) {
		camera_err(" get the data format failed!\n");
		return -1;
	}
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		camera.nplanes = fmt.fmt.pix_mp.num_planes;
	else
		camera.nplanes = 1;
	
	return 0;
}

int32_t vi_buffer_handle()
{
	int n_buffers = 0;
	int ret = -1;

	memset(&req, 0, sizeof(struct v4l2_requestbuffers));
	req.count = 3;
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = camera.memory;
	if (ioctl(camera.videofd, VIDIOC_REQBUFS, &req) < 0) {
		camera_err(" VIDIOC_REQBUFS failed\n");
		close(camera.videofd);
		return -1;
	}

	camera.buf_count = req.count;
	camera_print(" reqbuf number is %d\n", camera.buf_count);

	camera.buffers = calloc(req.count, sizeof(struct buffer));
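	/* for each buffer: query its size/offset with VIDIOC_QUERYBUF,
	 * then mmap it (MMAP) or allocate it from ION (USERPTR/DMABUF) */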
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		memset(&buf, 0, sizeof(struct v4l2_buffer));
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		else
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = camera.memory;
		buf.index = n_buffers;
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			buf.length = camera.nplanes;
			buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
		}

		if (ioctl(camera.videofd, VIDIOC_QUERYBUF, &buf) == -1) {
			camera_err(" VIDIOC_QUERYBUF error\n");

			if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
				free(buf.m.planes);
			free(camera.buffers);

			close(camera.videofd);

			return -1;
		}
		switch(camera.memory){
		case V4L2_MEMORY_MMAP:
			if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
				for (int i = 0; i < camera.nplanes; i++) {
					camera.buffers[n_buffers].length[i] = buf.m.planes[i].length;
					camera.buffers[n_buffers].start[i] = mmap(NULL , buf.m.planes[i].length,
								     PROT_READ | PROT_WRITE, \
								     MAP_SHARED , camera.videofd, \
								     buf.m.planes[i].m.mem_offset);

					camera_print(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
						n_buffers, camera.buffers[n_buffers].start[i], buf.m.planes[i].length,
							buf.m.planes[i].m.mem_offset);
				}
				free(buf.m.planes);
			}else {
					camera.buffers[n_buffers].length[0] = buf.length;
					camera.buffers[n_buffers].start[0] = mmap(NULL , buf.length,
							     PROT_READ | PROT_WRITE, \
							     MAP_SHARED , camera.videofd, \
							     buf.m.offset);
					camera_print(" map buffer index: %d, mem: %p, len: %x, offset: %x\n", \
					n_buffers, camera.buffers[n_buffers].start[0], buf.length, buf.m.offset);
			}
			break;
		case V4L2_MEMORY_USERPTR:
		case V4L2_MEMORY_DMABUF:
			/* allocate the buffers from ION and record the fd of each plane */
			if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
				for (int i = 0; i < camera.nplanes; i++) {
					camera.buffers[n_buffers].length[i] = buf.m.planes[i].length;
					camera.buffers[n_buffers].start[i] = (void *)ion_alloc(camera.buffers[n_buffers].length[i]);
					camera.buffers[n_buffers].fd[i] = ion_vir2fd(camera.buffers[n_buffers].start[i]);
				}
				free(buf.m.planes);
			} else {
				camera.buffers[n_buffers].length[0] = buf.length;
				camera.buffers[n_buffers].start[0] = (void *)ion_alloc(camera.buffers[n_buffers].length[0]);
				camera.buffers[n_buffers].fd[0] = ion_vir2fd(camera.buffers[n_buffers].start[0]);
			}
			break;
		default:
			break;
		}
	}

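	/* queue every buffer into the driver's incoming queue so capture
	 * can start as soon as the stream is turned on */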
	for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
		memset(&buf, 0, sizeof(struct v4l2_buffer));
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		else
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = camera.memory;
		buf.index = n_buffers;
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			buf.length = camera.nplanes;
			buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
		}
		switch(camera.memory){
			case V4L2_MEMORY_MMAP:
				break;
			case V4L2_MEMORY_USERPTR:
				if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
					for (int j = 0; j < camera.nplanes; j++){
						buf.m.planes[j].m.userptr = (unsigned long)camera.buffers[n_buffers].start[j];
						buf.m.planes[j].length = camera.buffers[n_buffers].length[j];
					}
				}else{
					buf.m.userptr = (unsigned long)camera.buffers[n_buffers].start[0];
					buf.length = camera.buffers[n_buffers].length[0];
				}
				break;
			case V4L2_MEMORY_DMABUF:
				if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
					for (int j = 0; j < camera.nplanes; j++)
						buf.m.planes[j].m.fd = ion_vir2fd(camera.buffers[n_buffers].start[j]);
				}else{
					buf.m.fd = ion_vir2fd(camera.buffers[n_buffers].start[0]);
				}
				break;
		}
		if ( (ret = ioctl(camera.videofd, VIDIOC_QBUF, &buf)) == -1) {
			camera_err(" VIDIOC_QBUF error\n");

			if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
				free(buf.m.planes);
			free(camera.buffers);

			return -1;
		} else if(ret == 0){
			camera_print("qbuf success!\n");
		}
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
			free(buf.m.planes);
	}


	return 0;
}

int32_t vi_set_standby_mode(int on)
{
	int ret;
	struct sensor_standby_status stby;

	if(!on) {
		/* re-queue all buffers before leaving standby; buf still carries the
		 * type/memory set up in vi_buffer_handle(), only the planes array
		 * has to be re-allocated for the multi-planar case */
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			buf.length = camera.nplanes;
			buf.m.planes = (struct v4l2_plane *)calloc(camera.nplanes, sizeof(struct v4l2_plane));
		}
		for(int i = 0; i < camera.buf_count; i++) {
			buf.index = i;
			ioctl(camera.videofd, VIDIOC_QBUF, &buf);
		}
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			free(buf.m.planes);
			buf.m.planes = NULL;
		}
	}
	
	memset(&stby, 0, sizeof(struct sensor_standby_status));
	stby.stby_stat = (on)?STBY_ON:STBY_OFF;
	ret = ioctl(camera.videofd, VIDIOC_SET_STANDBY, &stby);
	if(ret < 0) {
		camera_err("set standby mode %d fail!\n", on);
		return -1;
	}
	
	return 0;
}

int32_t vi_init_set_standby()
{
	enum v4l2_buf_type type;
	
	/* streamon first*/
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(camera.videofd, VIDIOC_STREAMON, &type) == -1) {
		camera_err(" VIDIOC_STREAMON error! %s\n", strerror(errno));
		return -1;
	} else {
		camera_print(" stream on success\n");
	}

	/* setting ISP */
	if (camera.sensor_type == V4L2_SENSOR_TYPE_RAW
	    && !isOutputRawData(camera.pixelformat)) {
		camera.ispId = -1;
		camera.ispId = camera.ispPort->ispGetIspId(camera.camera_index);
		if (camera.ispId >= 0) {
			camera_print(" ready to get ispExp and ispGain\n");
			camera.ispPort->ispStart(camera.ispId);
			camera.ispGain = camera.ispPort->ispGetIspGain(camera.ispId);
			camera.ispExp = camera.ispPort->ispGetIspExp(camera.ispId);
		}
	}

	/* set standby mode */
	if(vi_set_standby_mode(1) < 0) {
		camera_err("vi init set standby on fail\n");
		return -1;
	}
	

	return 0;
}

int32_t low_vi_init()
{
	camera_print("strat init\n");
	
	//1. config the camera
	vi_camera_config();	
	
	camera_print(" open %s!\n", camera_path);
	camera.videofd = open(camera_path, O_RDWR, 0);
	if (camera.videofd < 0) {
		camera_err(" open %s fail!!!\n", camera_path);
		return -1;
	}
	//query capabilities of the device
	if(vi_querycap() < 0) {
		camera_err("query capability failed!\n");
		return -1;
	}
	if(vi_s_input() < 0) {
		camera_err("set input failed!\n");
		return -1;
	}
	if(vi_create_ispapi() < 0) {
		camera_err("create isp api failed!\n");
		return -1;
	}
	if(vi_enum_fmt() < 0) {
		camera_err("fail to enum fmt\n");
		return -1;
	}
	if(vi_check_fmt() < 0) {
		camera_err("fail to set fmt\n");
		return -1;
	}
	if(vi_s_parm() < 0) {
		camera_err("fail to set stream parm\n");
		return -1;
	}
	//open the ION allocator (backs USERPTR/DMABUF buffers)
	ion_alloc_open();
	
	if(vi_s_fmt() < 0) {
		camera_err("fail to set format\n");
		return -1;
	}

	if(vi_buffer_handle() < 0) {
		camera_err("fail to prepare buffer!\n");
		return -1;
	}

	if(vi_init_set_standby() < 0) {
		camera_err("fail to set sensor standby\n");
		return -1;
	}

	return 0;
}

void low_vi_term()
{
	enum v4l2_buf_type type;
	
	/* streamoff */
	if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	else
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(camera.videofd, VIDIOC_STREAMOFF, &type) == -1) {
		camera_err(" VIDIOC_STREAMOFF error! %s\n", strerror(errno));
	}
	camera_dbg("stream off success\n");

	/* destroy ISP */
	if (camera.sensor_type == V4L2_SENSOR_TYPE_RAW) {
		if (camera.ispId >= 0 && !isOutputRawData(camera.pixelformat))
			camera.ispPort->ispStop(camera.ispId);

		DestroyAWIspApi(camera.ispPort);
		camera.ispPort = NULL;
	}

	/* unmap or free the capture buffers */
	switch (camera.memory) {
	case V4L2_MEMORY_MMAP:
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			for (int i = 0; i < camera.buf_count; ++i)
				for (int j = 0; j < camera.nplanes; j++)
					if (munmap(camera.buffers[i].start[j], camera.buffers[i].length[j]))
						printf("munmap error\n");
		} else {
			for (int i = 0; i < camera.buf_count; ++i)
				if (munmap(camera.buffers[i].start[0], camera.buffers[i].length[0]))
					printf("munmap error\n");
		}
		break;
	case V4L2_MEMORY_USERPTR:
	case V4L2_MEMORY_DMABUF:
		/* these buffers were allocated from ION in vi_buffer_handle(), release them here */
		if (camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
			for (int i = 0; i < camera.buf_count; ++i)
				for (int j = 0; j < camera.nplanes; j++)
					ion_free(camera.buffers[i].start[j]);
		} else {
			for (int i = 0; i < camera.buf_count; ++i)
				ion_free(camera.buffers[i].start[0]);
		}
		break;
	default:
		break;
	}
	free(camera.buffers);
	
	ion_alloc_close();
	close(camera.videofd);
}

int32_t vi_set_hflip(unsigned int hflip_enable)
{
	struct sensor_hflip_cfg hflip;
	
	memset(&hflip, 0, sizeof(struct sensor_hflip_cfg));
	hflip.hflip_enable = hflip_enable;
	if(ioctl(camera.videofd, VIDIOC_SET_HFLIP, &hflip) < 0) {
		camera_err("set hflip fail\n");
		return -1;
	}
	
	return 0;
}

int32_t vi_set_vflip(unsigned int vflip_enable)
{
	struct sensor_vflip_cfg vflip;
	
	memset(&vflip, 0, sizeof(struct sensor_vflip_cfg));
	vflip.vflip_enable = vflip_enable;
	if(ioctl(camera.videofd, VIDIOC_SET_VFLIP, &vflip) < 0) {
		camera_err("set vflip fail\n");
		return -1;
	}

	return 0;
}

int32_t low_vi_stream_on()
{
	if(!is_streaming) {
		if(vi_set_standby_mode(0) < 0) {
			camera_err("streamon error\n");
			return -1;
		}
		if (camera.ispId >= 0)
			camera.ispPort->ispStart(camera.ispId);
	} else {
		camera_err("still streaming, cannot streamon\n");
		return -1;
	}
	camera_dbg("streamon success\n");
	is_streaming = 1;

	return 0;
}

int32_t low_vi_stream_off(void)
{
	if(is_streaming) {
		if (camera.ispId >= 0)
			camera.ispPort->ispStop(camera.ispId);

		if(vi_set_standby_mode(1) < 0) {
			camera_err("streamoff error\n");
			return -1;
		}
	} else {
		camera_err("cannot streamoff!, sensor is not streaming\n");
		return -1;
	}
	camera_dbg("streamoff success\n");
	is_streaming = 0;

	return 0;
}

int vicam_test()
{
	int i;
	vi_frame_t frame;
	char test_file_path[128];

	//init sensor
	if(low_vi_init() < 0) {
		camera_err("vi init fail\n");
		low_vi_term();
		return -1;
	}

	//streamon
	if(low_vi_stream_on() < 0) {
		camera_err("stream on fail!\n");
		low_vi_term();
		return -1;
	}
	
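	//grab 10 frames, save each one to a file, and every third iteration
	//pause and resume streaming to exercise the standby path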
	for(i=0; i<10; i++) {
		//get frame
		memset(&frame, 0, sizeof(vi_frame_t));
		memset(test_file_path, 0, sizeof(test_file_path));
		if(low_vi_get_frame(&frame) < 0) {
			camera_err("get frame fail!\n");
			low_vi_term();
			return -1;
		} else {
			sprintf(test_file_path, "%s/source_%s_%d.yuv", camera.save_path, get_format_name(camera.pixelformat), i);
			save_frame_to_file(test_file_path, frame.buffers.start[0], frame.buffers.length[0]);
			camera_dbg("save test file to %s\n", test_file_path);
		}
		
		if((i % 3) == 0) {
			//pause
			low_vi_stream_off();
			sleep(2);
			//resume
			low_vi_stream_on();
		}
	}

	//end, release resources
	low_vi_term();
	
	return 0;
}

int main(int argc, char *argv[])
{
	int ret;

#if 1
	ret = vicam_test();
	if(ret < 0) {
		printf("vicam_test failed!\n");
		return -1;
	}
#endif

	return 0;
}
