Several notes on changing camera parameters (frame rate, resolution, exposure, ...) with OpenCV

1. Cameras opened with VideoCapture

VideoCapture capture(0); // open the default camera

Set the camera parameters (do not change them blindly):

capture.set(CV_CAP_PROP_FRAME_WIDTH, 1080);   // frame width
capture.set(CV_CAP_PROP_FRAME_HEIGHT, 960);   // frame height
capture.set(CV_CAP_PROP_FPS, 30);             // frame rate, frames per second
capture.set(CV_CAP_PROP_BRIGHTNESS, 1);       // brightness
capture.set(CV_CAP_PROP_CONTRAST, 40);        // contrast
capture.set(CV_CAP_PROP_SATURATION, 50);      // saturation
capture.set(CV_CAP_PROP_HUE, 50);             // hue
capture.set(CV_CAP_PROP_EXPOSURE, 50);        // exposure

Read the camera parameters back:

capture.get(CV_CAP_PROP_FRAME_WIDTH);
capture.get(CV_CAP_PROP_FRAME_HEIGHT);
capture.get(CV_CAP_PROP_FPS);
capture.get(CV_CAP_PROP_BRIGHTNESS);
capture.get(CV_CAP_PROP_CONTRAST);
capture.get(CV_CAP_PROP_SATURATION);
capture.get(CV_CAP_PROP_HUE);
capture.get(CV_CAP_PROP_EXPOSURE);

Get video parameters:

capture.get(CV_CAP_PROP_FRAME_COUNT); // number of frames (for video files)

You will then find that, apart from a few parameters you can actually change (exposure, for example), most of them cannot be changed, and some cannot even be read back, so this approach is of limited use.
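A quick way to see which properties your camera and backend actually honor is to check the return value of set() and then read the value back with get(). Below is a minimal sketch, using the legacy CV_CAP_PROP_* constants from the snippets above (newer OpenCV versions spell them cv::CAP_PROP_*):

#include <opencv2/highgui/highgui.hpp>
#include <iostream>

int main() {
    cv::VideoCapture capture(0);
    if (!capture.isOpened()) return -1;

    // set() may return false for an unsupported property, but even a
    // "successful" set() can be silently ignored by the driver, so the
    // only reliable check is to read the value back.
    bool ok = capture.set(CV_CAP_PROP_FPS, 120);
    double fps = capture.get(CV_CAP_PROP_FPS);
    std::cout << "set() returned " << ok
              << ", driver reports FPS = " << fps << std::endl;
    return 0;
}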

2. Not doing development, just changing the settings

In that case I recommend a small tool, amcap (Baidu Pan link: https://pan.baidu.com/s/1pL8nq0V#list/path=%2F). It is simple and easy to pick up.

Addendum (2019.3.12): I just remembered something a senior classmate told me: adjusting the camera's exposure with this tool can also change the frame rate, and the camera remembers the exposure setting (the other properties are not remembered this way).

3. Modifying OpenCV's source, although the effect is probably similar to the first approach

Look in the OpenCV source tree at modules/highgui/src/cap_v4l.cpp; the parameter defaults are set fairly near the top of the file, at roughly line 200 or so, so you can either search for them or just scroll to them.
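For orientation, what you are looking for are the default capture-size macros near the top of cap_v4l.cpp. In the copies I have seen they look roughly like the lines below, but the exact names, values, and line numbers differ between OpenCV releases, so treat this purely as something to search for:

/* illustrative only; check your own OpenCV source tree */
#define DEFAULT_V4L_WIDTH  640
#define DEFAULT_V4L_HEIGHT 480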

4. V4L2

Below is a reference I found that lets you get past the frame-rate limit, provided of course that the camera supports it:

https://blog.csdn.net/c406495762/article/details/72732135

At the moment this only works on Linux. I have tried it myself: with a 120 fps camera, simply capturing frames (with no extra processing) reaches roughly 100 fps, and the smaller the image resolution you set, the higher the frame rate you can reach.
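The full program from that reference follows below; it raises the frame rate mainly by requesting the MJPEG pixel format in initDevice(). If your driver supports it, you can also request a specific frame rate explicitly through the VIDIOC_S_PARM ioctl. The snippet here is only a sketch of that idea and is not part of the program that follows (fd_cam stands for an already opened /dev/videoX descriptor):

#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <cstring>
#include <cstdio>

// Ask the driver for the given frame rate and print what it actually accepted.
static void requestFps(int fd_cam, unsigned int fps) {
    struct v4l2_streamparm parm;
    memset(&parm, 0, sizeof(parm));
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    parm.parm.capture.timeperframe.numerator = 1;     // frame interval = 1/fps s
    parm.parm.capture.timeperframe.denominator = fps;
    if (ioctl(fd_cam, VIDIOC_S_PARM, &parm) < 0)
        perror("VIDIOC_S_PARM");
    // The driver may clamp the request, so read back the real value.
    if (ioctl(fd_cam, VIDIOC_G_PARM, &parm) == 0)
        printf("timeperframe: %u/%u\n",
               parm.parm.capture.timeperframe.numerator,
               parm.parm.capture.timeperframe.denominator);
}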

 
#include <unistd.h>
#include <error.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <math.h>
#include <iostream>
#include <iomanip>
#include <string>

using namespace std;
using namespace cv;

#define CLEAR(x) memset(&(x), 0, sizeof(x))

#define IMAGEWIDTH 3264
#define IMAGEHEIGHT 2448

#define WINDOW_NAME1 "Original Image"   // window title
#define WINDOW_NAME2 "Contours"         // window title

Mat g_srcImage;
Mat g_grayImage;
int g_nThresh = 90;
int g_nMaxThresh = 255;
RNG g_rng(12345);
Mat g_cannyMat_output;
vector<vector<Point> > g_vContours;
vector<Vec4i> g_vHierarchy;
Point point1[100000];
Point point2[100000];
Point point3[100000];
int ii, iii;
int flag2 = 0;      // used for obstacle avoidance
float number = 0;
int fps = 0;

class V4L2Capture {
public:
    V4L2Capture(char *devName, int width, int height);
    virtual ~V4L2Capture();

    int openDevice();
    int closeDevice();
    int initDevice();
    int startCapture();
    int stopCapture();
    int freeBuffers();
    int getFrame(void **, size_t *);
    int backFrame();
    static void test();

private:
    int initBuffers();

    struct cam_buffer {
        void *start;
        unsigned int length;
    };
    char *devName;
    int capW;
    int capH;
    int fd_cam;
    cam_buffer *buffers;
    unsigned int n_buffers;
    int frameIndex;
};

V4L2Capture::V4L2Capture(char *devName, int width, int height) {
    this->devName = devName;
    this->fd_cam = -1;
    this->buffers = NULL;
    this->n_buffers = 0;
    this->frameIndex = -1;
    this->capW = width;
    this->capH = height;
}

V4L2Capture::~V4L2Capture() {
}

int V4L2Capture::openDevice() {
    /* Open the video device */
    printf("video dev : %s\n", devName);
    fd_cam = open(devName, O_RDWR);
    if (fd_cam < 0) {
        perror("Can't open video device");
    }
    return 0;
}

int V4L2Capture::closeDevice() {
    if (fd_cam > 0) {
        int ret = 0;
        if ((ret = close(fd_cam)) < 0) {
            perror("Can't close video device");
        }
        return 0;
    } else {
        return -1;
    }
}

int V4L2Capture::initDevice() {
    int ret;
    struct v4l2_capability cam_cap;     // device capabilities
    struct v4l2_cropcap cam_cropcap;    // cropping capabilities
    struct v4l2_fmtdesc cam_fmtdesc;    // supported formats (VIDIOC_ENUM_FMT)
    struct v4l2_crop cam_crop;          // capture window / cropping rectangle
    struct v4l2_format cam_format;      // video format: resolution, pixel format, ...

    /* VIDIOC_QUERYCAP: query basic information about the camera */
    ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
    if (ret < 0) {
        perror("Can't get device information: VIDIOCGCAP");
    }
    printf(
            "Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
            cam_cap.driver, cam_cap.card, cam_cap.bus_info,
            (cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,
            cam_cap.version & 0XFF);

    /* VIDIOC_ENUM_FMT: list all pixel formats the camera supports */
    cam_fmtdesc.index = 0;
    cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Support format:\n");
    while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
        printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
        cam_fmtdesc.index++;
    }

    /* VIDIOC_CROPCAP: query the cropping capabilities */
    cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
        printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
                cam_cropcap.defrect.left, cam_cropcap.defrect.top,
                cam_cropcap.defrect.width, cam_cropcap.defrect.height);
        /* VIDIOC_S_CROP: set the capture window */
        cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_crop.c = cam_cropcap.defrect;   // default capture rectangle
        if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
            //printf("Can't set crop para\n");
        }
    } else {
        printf("Can't set cropcap para\n");
    }

    /* VIDIOC_S_FMT: set the frame format */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    cam_format.fmt.pix.width = capW;
    cam_format.fmt.pix.height = capH;
    cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;    // must be a format the camera supports
    cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
    if (ret < 0) {
        perror("Can't set frame information");
    }
    /* VIDIOC_G_FMT: read back the frame format actually in effect */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
    if (ret < 0) {
        perror("Can't get frame information");
    }
    printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
            cam_format.fmt.pix.width, cam_format.fmt.pix.height);
    ret = initBuffers();
    if (ret < 0) {
        perror("Buffers init error");
        //exit(-1);
    }
    return 0;
}

int V4L2Capture::initBuffers() {
    int ret;
    /* VIDIOC_REQBUFS: request frame buffers from the driver */
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
    if (ret < 0) {
        perror("Request frame buffers failed");
    }
    if (req.count < 2) {
        perror("Request frame buffers while insufficient buffer memory");
    }
    buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
    if (!buffers) {
        perror("Out of memory");
    }
    for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        // query buffer n_buffers to get its offset and length
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
        if (ret < 0) {
            printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
            return -1;
        }
        buffers[n_buffers].length = buf.length;
        //printf("buf.length= %d\n",buf.length);
        // map the driver buffer into user space
        buffers[n_buffers].start = mmap(
                NULL, // start anywhere
                buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
                buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            printf("mmap buffer%d failed\n", n_buffers);
            return -1;
        }
    }
    return 0;
}

int V4L2Capture::startCapture() {
    unsigned int i;
    for (i = 0; i < n_buffers; i++) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
            printf("VIDIOC_QBUF buffer%d failed\n", i);
            return -1;
        }
    }
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
        printf("VIDIOC_STREAMON error");
        return -1;
    }
    return 0;
}

int V4L2Capture::stopCapture() {
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
        printf("VIDIOC_STREAMOFF error\n");
        return -1;
    }
    return 0;
}

int V4L2Capture::freeBuffers() {
    unsigned int i;
    for (i = 0; i < n_buffers; ++i) {
        if (-1 == munmap(buffers[i].start, buffers[i].length)) {
            printf("munmap buffer%d failed\n", i);
            return -1;
        }
    }
    free(buffers);
    return 0;
}

int V4L2Capture::getFrame(void **frame_buf, size_t *len) {
    struct v4l2_buffer queue_buf;
    CLEAR(queue_buf);
    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
        printf("VIDIOC_DQBUF error\n");
        return -1;
    }
    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    frameIndex = queue_buf.index;
    return 0;
}

int V4L2Capture::backFrame() {
    if (frameIndex != -1) {
        struct v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = frameIndex;
        if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
            printf("VIDIOC_QBUF error\n");
            return -1;
        }
        return 0;
    }
    return -1;
}

void V4L2Capture::test() {
    unsigned char *yuv422frame = NULL;
    unsigned long yuvframeSize = 0;

    string videoDev = "/dev/video0";
    V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
            1920, 1080);
    vcap->openDevice();
    vcap->initDevice();
    vcap->startCapture();
    vcap->getFrame((void **) &yuv422frame, (size_t *) &yuvframeSize);

    vcap->backFrame();
    vcap->freeBuffers();
    vcap->closeDevice();
}

// Fit a straight line x = ff*y + gg through the mid-line points by least squares
void line2(Point point3[100000], int n)
{
    float aa = 0, bb = 0, cc = 0, dd = 0, ee, ff, gg;
    int jj = 0;
    for (; jj < n; jj++)
    {
        aa += point3[jj].x * point3[jj].x;
        bb += point3[jj].x;
        cc += point3[jj].x * point3[jj].y;
        dd += point3[jj].y;
    }
    ee = aa * n - bb * bb;
    if ((int) (ee * 100) != 0)
    {
        ff = (n * cc - bb * dd) / ee;
        gg = (dd - bb * ff) / n;
    }
    else {
        ff = 0;
        gg = 1;
    }
    Point point0, pointn;
    point0.y = 0;
    point0.x = gg;
    pointn.y = (n - 1);
    pointn.x = ((n - 1) * ff + gg);

    Mat draw_ing2 = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);
    line(draw_ing2, point0, pointn, Scalar(255, 255, 255));
    imshow("10", draw_ing2);
    //cout << "\n" << ff << " " << gg << endl;
    float the = 180 * atan(ff) / 3.14159;
    float dis = ff * 160 + gg - 160;
    cout << the << " " << dis << endl;
    // at dead center ff = 0 and gg = 160; ff > 0 means rotated counter-clockwise, ff < 0 clockwise
}

void findcolor(cv::Mat &image)
{
    cv::Mat_<cv::Vec3b>::iterator it = image.begin<cv::Vec3b>();
    cv::Mat_<cv::Vec3b>::iterator itend = image.end<cv::Vec3b>();
    ii = 0;
    iii = 0;
    int flagg = 0;
    cv::Mat srcX(image.rows, image.cols, CV_32F);
    cv::Mat srcY(image.rows, image.cols, CV_32F);
    for (int i = 0; i < image.rows; i++)
    {
        for (int j = 0; j < image.cols; j++)
        {
            if (flagg == 0) /* traversing this way does not give valid data in the horizontal direction */
            {
                if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)
                {
                    flagg = 1;
                    point1[ii].x = i;
                    point1[ii].y = j;
                    ii++;
                }
            }
            else
            {
                if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)
                {
                    flagg = 0;
                    point2[iii].x = i;
                    point2[iii].y = j;
                    iii++;
                }
            }
            if (it == itend)
                break;
            else it++;
        }
    }
    IplImage pImg = IplImage(image);
    CvArr *arr = (CvArr*) &pImg;
    int nn = ii;
    for (; ii > 0; ii--)
    {
        point3[ii].x = (point1[ii].x + point2[ii].x) / 2;
        point3[ii].y = (point1[ii].y + point2[ii].y) / 2;
        //circle(image, point3[ii], 1, Scalar(255, 255, 255));
        cvSet2D(arr, point3[ii].x, point3[ii].y, Scalar(255, 255, 255));
    }
    line2(point3, nn);
}

void on_ThreshChange(int, void*)
{
    // Detect edges with Canny
    Canny(g_grayImage, g_cannyMat_output, g_nThresh, g_nThresh * 2, 3);

    // Find contours
    findContours(g_cannyMat_output, g_vContours, g_vHierarchy, RETR_TREE, CHAIN_APPROX_SIMPLE, Point(0, 0));

    // Compute the moments
    vector<Moments> mu(g_vContours.size());
    for (unsigned int i = 0; i < g_vContours.size(); i++)
    { mu[i] = moments(g_vContours[i], false); }

    // Compute the mass centers
    vector<Point2f> mc(g_vContours.size());
    for (unsigned int i = 0; i < g_vContours.size(); i++)
    { mc[i] = Point2f(static_cast<float>(mu[i].m10 / mu[i].m00), static_cast<float>(mu[i].m01 / mu[i].m00)); }

    // Draw the contours
    Mat drawing = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);
    for (unsigned int i = 0; i < g_vContours.size(); i++)
    {
        //Scalar color = Scalar( g_rng.uniform(0, 255), g_rng.uniform(0,255), g_rng.uniform(0,255) ); // random color
        Scalar color = Scalar(255, 0, 255);
        drawContours(drawing, g_vContours, i, color, 2, 8, g_vHierarchy, 0, Point()); // draw outer and inner contours
        circle(drawing, mc[i], 4, color, -1, 8, 0); // mark the mass center
    }

    findcolor(drawing);
    //line1(point1,point2,ii,iii);

    // Show the result in a window
    // namedWindow( WINDOW_NAME2, WINDOW_AUTOSIZE );
    imshow(WINDOW_NAME2, drawing);
}

void findline(Mat image)
{
    cv::Mat_<cv::Vec3b>::iterator it = image.begin<cv::Vec3b>();
    cv::Mat_<cv::Vec3b>::iterator itend = image.end<cv::Vec3b>();
    for (; it != itend; it++)
    {
        if ((*it)[1] == 0 && (*it)[2] >= 100) // this condition may need adjusting
        {
            if (flag2 == 0)
            {
                flag2 = 1;
                cout << "Caution: line 1, avoid obstacle" << endl;
                // send a message to the main controller
            }
            else
            {
                cout << "Caution: line 2, avoid obstacle" << endl;
                // send a message to the main controller
                // leave an interval between obstacle avoidance 1 and 2
            }
        }
    }
}

// Downsample the image to half size with remap()
void wave(const cv::Mat &image, cv::Mat &result)
{
    cv::Mat srcX(image.rows / 2, image.cols / 2, CV_32F);
    cv::Mat srcY(image.rows / 2, image.cols / 2, CV_32F);
    for (int i = 0; i < image.rows / 2; i++)
        for (int j = 0; j < image.cols / 2; j++)
        {
            srcX.at<float>(i, j) = 2 * j;
            srcY.at<float>(i, j) = 2 * i;
        }
    cv::remap(image, result, srcX, srcY, cv::INTER_LINEAR);
}

void VideoPlayer() {
    unsigned char *yuv422frame = NULL;
    unsigned long yuvframeSize = 0;

    string videoDev = "/dev/video0";
    V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()), 640, 480);
    vcap->openDevice();
    vcap->initDevice();
    vcap->startCapture();

    cvNamedWindow("Capture", CV_WINDOW_AUTOSIZE);
    IplImage *img;
    CvMat cvmat;
    double t;
    clock_t start, end;
    double number = 0;
    int fps = 0;
    while (1) {
        start = clock();
        t = (double) cvGetTickCount();
        vcap->getFrame((void **) &yuv422frame, (size_t *) &yuvframeSize);
        // wrap the compressed MJPEG buffer in a CvMat header for decoding
        cvmat = cvMat(IMAGEHEIGHT, IMAGEWIDTH, CV_8UC3, (void*) yuv422frame); //CV_8UC3
        // decode the MJPEG frame
        img = cvDecodeImage(&cvmat, 1);
        if (!img) {
            printf("DecodeImage error!\n");
            vcap->backFrame();
            continue;
        }

        cv::Mat g_srcImage = cv::cvarrToMat(img, true);

        cvShowImage("Capture", img);
        cvReleaseImage(&img);
        vcap->backFrame();
        if ((cvWaitKey(1) & 255) == 27) {
            exit(0);
        }

        wave(g_srcImage, g_srcImage);
        findline(g_srcImage);

        // convert the source image to grayscale and smooth it
        cvtColor(g_srcImage, g_grayImage, COLOR_BGR2GRAY);
        blur(g_grayImage, g_grayImage, Size(3, 3));

        // create and initialize the threshold trackbar
        // (note: WINDOW_NAME1 is never created with namedWindow(), so the trackbar may not appear)
        createTrackbar("Threshold", WINDOW_NAME1, &g_nThresh, g_nMaxThresh, on_ThreshChange);
        on_ThreshChange(0, 0);
        t = (double) cvGetTickCount() - t;
        printf("Used time is %g ms\n", (t / (cvGetTickFrequency() * 1000)));

        end = clock();
        number = number + end - start;
        fps++;
        if (number / CLOCKS_PER_SEC >= 0.25) // on Windows use CLK_TCK
        {
            cout << fps << endl;
            fps = 0;
            number = 0;
        }
    }
    vcap->stopCapture();
    vcap->freeBuffers();
    vcap->closeDevice();
}

int main() {
    VideoPlayer();

    return 0;
}
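To build the program above on Linux, link against OpenCV (the <pthread.h> include is unused, so no extra thread library is strictly needed). Assuming the source is saved as v4l2_capture.cpp (a file name chosen here for the example) and a pkg-config package named opencv is available (on newer systems it may be called opencv4, and note that the old C API used above was removed in OpenCV 4), the command looks roughly like:

g++ v4l2_capture.cpp -o v4l2_capture `pkg-config --cflags --libs opencv`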

 
