#include<opencv2/opencv.hpp>
using namespace cv;
//..\..\UnityActivate\VSActivateUnity.exe
#define EXPORT_API __declspec(dllexport)
VideoCapture my_cameraRight;   // right-eye camera (opened as device 0)
VideoCapture my_cameraLeft;    // left-eye camera (opened as device 1)
//Mat image, imageR, imageL;
int m_width = 640;             // capture width requested from both cameras
int m_height = 480;            // capture height requested from both cameras
#if 0 // standalone .exe test version of the camera code (disabled)
bool openCamera()
{
    bool my_open = false;
    // Keep retrying until both cameras are open.
    while (!my_cameraRight.isOpened() || !my_cameraLeft.isOpened())
    {
        if (!my_cameraRight.isOpened()) my_cameraRight.open(0);
        if (!my_cameraLeft.isOpened())  my_cameraLeft.open(1);
    }
    my_cameraRight.set(CV_CAP_PROP_FRAME_WIDTH, m_width);
    my_cameraRight.set(CV_CAP_PROP_FRAME_HEIGHT, m_height);
    my_cameraLeft.set(CV_CAP_PROP_FRAME_WIDTH, m_width);
    my_cameraLeft.set(CV_CAP_PROP_FRAME_HEIGHT, m_height);
    if (my_cameraRight.isOpened() && my_cameraLeft.isOpened())
    {
        my_open = true;
    }
    return my_open;
}
void recieveFrameRight(uchar* texturePtrRight)
{
    Mat my_frameBGR_Right;
    Mat my_frameRBG_Right;
    my_cameraRight >> my_frameBGR_Right;
    if (my_frameBGR_Right.data)
    {
        // Convert BGR to RGB and copy into the caller-supplied buffer,
        // which must hold at least m_width * m_height * 3 bytes.
        cvtColor(my_frameBGR_Right, my_frameRBG_Right, CV_BGR2RGB);
        memcpy(texturePtrRight, my_frameRBG_Right.data,
               my_frameRBG_Right.cols * my_frameRBG_Right.rows * my_frameRBG_Right.channels() * sizeof(uchar));
    }
}
void recieveFrameLeft(uchar* texturePtrLeft)
{
    Mat my_frameBGR_Left;
    Mat my_frameRBG_Left;
    my_cameraLeft >> my_frameBGR_Left;
    if (my_frameBGR_Left.data)
    {
        // Convert BGR to RGB and copy into the caller-supplied buffer,
        // which must hold at least m_width * m_height * 3 bytes.
        cvtColor(my_frameBGR_Left, my_frameRBG_Left, CV_BGR2RGB);
        memcpy(texturePtrLeft, my_frameRBG_Left.data,
               my_frameRBG_Left.cols * my_frameRBG_Left.rows * my_frameRBG_Left.channels() * sizeof(uchar));
    }
}
void closeCamera()
{
    // Release whichever cameras are currently open.
    if (my_cameraRight.isOpened())
        my_cameraRight.release();
    if (my_cameraLeft.isOpened())
        my_cameraLeft.release();
}
int main()
{
    if (openCamera())
    {
        Mat cameraRight, cameraLeft;
        for (;;)
        {
            // Grab a fresh frame from each camera on every iteration.
            my_cameraRight >> cameraRight;
            my_cameraLeft >> cameraLeft;
            imshow("CameraRight", cameraRight);
            imshow("CameraLeft", cameraLeft);
            int key = waitKey(3);
            if (key == 27 || key == 32)   // Esc or Space quits
            {
                break;
            }
        }
    }
    return 0;
}
void main3()
{
    VideoCapture cap_test(0);
    Mat image;
    if (cap_test.isOpened())
    {
        for (;;)
        {
            cap_test >> image;
            if (image.empty())
                break;                    // stop if the camera returns an empty frame
            imshow("camera", image);
            waitKey(3);
        }
    }
}
#endif
#if 1 // DLL exports consumed by Unity3D
extern "C" bool EXPORT_API openCamera()
{
bool my_open = false;
while (!my_cameraRight.isOpened() && !my_cameraLeft.isOpened())
{
//std::cout << "Cannot open the camera!" << std::endl;
my_cameraRight.open(0);
my_cameraLeft.open(1);
}
if (my_cameraRight.isOpened() && my_cameraLeft.isOpened())
{
my_cameraRight.set(CV_CAP_PROP_FRAME_WIDTH, m_width);
my_cameraRight.set(CV_CAP_PROP_FRAME_HEIGHT, m_height);
my_cameraLeft.set(CV_CAP_PROP_FRAME_WIDTH, m_width);
my_cameraLeft.set(CV_CAP_PROP_FRAME_HEIGHT, m_height);
my_open = true;
}
return my_open;
}
extern "C" void EXPORT_API recieveFrameRight(uchar* texturePtrRight)
{
Mat my_frameBGR_Right;
Mat my_frameRBG_Right;
my_cameraRight >> my_frameBGR_Right;
if (my_frameBGR_Right.data )
{
cvtColor(my_frameBGR_Right, my_frameRBG_Right, CV_BGR2RGB);
memcpy(texturePtrRight, my_frameRBG_Right.data, my_frameRBG_Right.cols*my_frameRBG_Right.rows*my_frameRBG_Right.channels()*sizeof(uchar));
}
}
extern "C" void EXPORT_API recieveFrameLeft( uchar* texturePtrLeft)
{
Mat my_frameBGR_Left;
Mat my_frameRBG_Left;
my_cameraLeft >> my_frameBGR_Left;
if ( my_frameBGR_Left.data)
{
cvtColor(my_frameBGR_Left, my_frameRBG_Left, CV_BGR2RGB);
memcpy(texturePtrLeft, my_frameRBG_Left.data, my_frameRBG_Left.cols*my_frameRBG_Left.rows*my_frameRBG_Left.channels()*sizeof(uchar));
}
}
extern "C" void EXPORT_API closeCamera()
{
if (my_cameraRight.isOpened() && my_cameraLeft.isOpened())
{
my_cameraRight.release();
my_cameraLeft.release();
}
}
#endif
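// --------------------------------------------------------------------------------------------
// Hypothetical smoke test (not part of the original plugin): a minimal sketch of how the
// exported functions above could be exercised from a plain Win32 program before wiring them
// into Unity. The DLL file name "CameraPlugin.dll" is an assumption; substitute whatever name
// the project actually builds.
// --------------------------------------------------------------------------------------------
#include <windows.h>
#include <vector>
#include <cstdio>

typedef bool (*OpenCameraFn)();
typedef void (*ReceiveFrameFn)(unsigned char*);
typedef void (*CloseCameraFn)();

int runDllSmokeTest()
{
    HMODULE dll = LoadLibraryA("CameraPlugin.dll");                 // assumed output name
    if (!dll) return 1;
    OpenCameraFn   openCam    = (OpenCameraFn)GetProcAddress(dll, "openCamera");
    ReceiveFrameFn rightFrame = (ReceiveFrameFn)GetProcAddress(dll, "recieveFrameRight");
    CloseCameraFn  closeCam   = (CloseCameraFn)GetProcAddress(dll, "closeCamera");
    if (openCam && rightFrame && closeCam && openCam())
    {
        std::vector<unsigned char> buffer(640 * 480 * 3);           // m_width * m_height * 3 (RGB24)
        rightFrame(buffer.data());                                  // copy one right-eye frame into the buffer
        printf("first pixel RGB: %d %d %d\n", buffer[0], buffer[1], buffer[2]);
        closeCam();
    }
    FreeLibrary(dll);
    return 0;
}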
#include<opencv2/opencv.hpp>
#include<string>
using namespace std;
using namespace cv;
//#define EXPORT_API __declspec(dllexport)
int main()
{
    // Initialize the video capture streams from both cameras and set the frame size.
    VideoCapture MyLeftCamera(0);
    MyLeftCamera.set(CV_CAP_PROP_FRAME_WIDTH, 640);
    MyLeftCamera.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
    VideoCapture MyRightCamera(1);
    MyRightCamera.set(CV_CAP_PROP_FRAME_WIDTH, 640);
    MyRightCamera.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
    if (!MyLeftCamera.isOpened()) return 1;
    if (!MyRightCamera.isOpened()) return 1;
    int number_image = 1;                  // index used in the saved file names
    const char *str1 = ".jpg";             // file extension (currently unused)
    char filenameLeft[20] = "";
    char filenameRight[20] = "";
    while (true)
    {
        Mat frame0;
        MyLeftCamera >> frame0;
        Mat frame1;
        MyRightCamera >> frame1;
        if (!frame0.data || !frame1.data)
        {
            continue;                      // skip this iteration if either grab failed
        }
imshow("Left",frame0);
imshow("Right", frame1);
// waitKey (10);
//保存图片以便于标定程序使用
char mod = (char)waitKey(100);
if (mod == 'c' || mod == 'C')
{
sprintf_s(filenameLeft, "left%d.jpg", number_image);
imwrite(filenameLeft, frame0);
sprintf_s(filenameRight, "right%d.jpg", number_image);
imwrite(filenameRight, frame1);
cout << "成功获取当前帧,并以文件名" << filenameLeft << "保存...\n\n";
cout << "成功获取当前帧,并以文件名" << filenameRight << "保存...\n\n";
printf("按“C“键截取当前帧并保存为标定图片...\n按“Q”键退出截取帧过程...\n\n");
number_image++;
}
else if (mod == 'q' || mod == 'Q')
{
printf("截取图像帧过程完成...\n\n");
cout << "共成功截取" << --number_image << "帧图像!!\n\n";
break;
}
}
return 0;
}
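// --------------------------------------------------------------------------------------------
// Hypothetical follow-up step (not in the original file): a minimal sketch of how the saved
// left%d.jpg / right%d.jpg pairs could be read back and checked for chessboard corners before
// feeding them to cv::stereoCalibrate(). The 9x6 inner-corner board size is an assumption.
// --------------------------------------------------------------------------------------------
#include <opencv2/opencv.hpp>
#include <cstdio>
#include <vector>

static void checkCalibrationPairs(int pairCount)
{
    cv::Size boardSize(9, 6);                                   // assumed chessboard inner-corner count
    for (int i = 1; i <= pairCount; ++i)
    {
        char nameLeft[20], nameRight[20];
        sprintf_s(nameLeft, "left%d.jpg", i);
        sprintf_s(nameRight, "right%d.jpg", i);
        cv::Mat imgLeft = cv::imread(nameLeft, CV_LOAD_IMAGE_GRAYSCALE);
        cv::Mat imgRight = cv::imread(nameRight, CV_LOAD_IMAGE_GRAYSCALE);
        if (imgLeft.empty() || imgRight.empty())
            continue;                                           // skip missing pairs
        std::vector<cv::Point2f> cornersLeft, cornersRight;
        bool okLeft = cv::findChessboardCorners(imgLeft, boardSize, cornersLeft);
        bool okRight = cv::findChessboardCorners(imgRight, boardSize, cornersRight);
        printf("pair %d: left %s, right %s\n", i, okLeft ? "found" : "missed", okRight ? "found" : "missed");
        // Only pairs where both detections succeed would be passed on to cv::stereoCalibrate().
    }
}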
opencv_calib3d2413d.lib
opencv_contrib2413d.lib
opencv_core2413d.lib
opencv_features2d2413d.lib
opencv_flann2413d.lib
opencv_gpu2413d.lib
opencv_highgui2413d.lib
opencv_imgproc2413d.lib
opencv_legacy2413d.lib
opencv_ml2413d.lib
opencv_nonfree2413d.lib
opencv_objdetect2413d.lib
opencv_ocl2413d.lib
opencv_photo2413d.lib
opencv_stitching2413d.lib
opencv_superres2413d.lib
opencv_ts2413d.lib
opencv_video2413d.lib
opencv_videostab2413d.lib
opencv_calib3d2413.lib
opencv_contrib2413.lib
opencv_core2413.lib
opencv_features2d2413.lib
opencv_flann2413.lib
opencv_gpu2413.lib
opencv_highgui2413.lib
opencv_imgproc2413.lib
opencv_legacy2413.lib
opencv_ml2413.lib
opencv_nonfree2413.lib
opencv_objdetect2413.lib
opencv_ocl2413.lib
opencv_photo2413.lib
opencv_stitching2413.lib
opencv_superres2413.lib
opencv_ts2413.lib
opencv_video2413.lib
opencv_videostab2413.lib
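// --------------------------------------------------------------------------------------------
// The .lib names above are the Additional Dependencies pasted into the Visual Studio linker
// settings for OpenCV 2.4.13 (Debug "d" libraries first, then Release). As an alternative
// (my suggestion, not part of the original project), the subset these sources actually need
// can be pulled in from code with #pragma comment, picking the Debug builds automatically:
// --------------------------------------------------------------------------------------------
#ifdef _DEBUG
#pragma comment(lib, "opencv_core2413d.lib")
#pragma comment(lib, "opencv_highgui2413d.lib")
#pragma comment(lib, "opencv_imgproc2413d.lib")
#else
#pragma comment(lib, "opencv_core2413.lib")
#pragma comment(lib, "opencv_highgui2413.lib")
#pragma comment(lib, "opencv_imgproc2413.lib")
#endif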
#include "opencv2/opencv.hpp"
#include<iostream>
using namespace std;
using namespace cv;
Mat image;
Mat imageCopy;                   // copy of the original image used while drawing the rectangle
bool leftButtonDownFlag = false; // flag: playback is paused while the left mouse button is held down
Point originalPoint;             // rectangle start point
Point processPoint;              // rectangle end point
// Mouse callback
void onMouse(int event, int x, int y, int flags, void *ustc)
{
    if (event == CV_EVENT_LBUTTONDOWN)
    {
        leftButtonDownFlag = true;          // set the pause flag
        originalPoint = Point(x, y);        // the rectangle starts where the button went down
        processPoint = originalPoint;
    }
    if (event == CV_EVENT_MOUSEMOVE && leftButtonDownFlag)
    {
        imageCopy = image.clone();
        processPoint = Point(x, y);
        if (originalPoint != processPoint)
        {
            // Draw the rectangle on the copied image.
            rectangle(imageCopy, originalPoint, processPoint, Scalar(255, 0, 0), 2);
        }
        imshow("Cap", imageCopy);
    }
    if (event == CV_EVENT_LBUTTONUP)
    {
        leftButtonDownFlag = false;
        Mat rectImage = image(Rect(originalPoint, processPoint));   // show the selected sub-image
        imshow("Sub Image", rectImage);
    }
}
Mat MoveDetect(Mat background, Mat img)
{
    // Convert background and img to grayscale.
    Mat result = img.clone();
    Mat gray1, gray2;
    cvtColor(background, gray1, CV_BGR2GRAY);
    cvtColor(img, gray2, CV_BGR2GRAY);
    // Canny edge detection (disabled).
    //Canny(background, background, 0, 30, 3);
    // Difference background and img, then threshold the difference image diff.
    Mat diff;
    absdiff(gray1, gray2, diff);
    //imshow("absdiss", diff);
    threshold(diff, diff, 50, 255, CV_THRESH_BINARY);
    //imshow("threshold", diff);
    // Erosion/dilation to remove noise (disabled).
    /*
    Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
    Mat element2 = getStructuringElement(MORPH_RECT, Size(15, 15));
    erode(diff, diff, element);
    //imshow("erode", diff);
    dilate(diff, diff, element2);
    //imshow("dilate", diff);
    */
    // After binarization, apply median blur + dilation instead.
    Mat element = getStructuringElement(MORPH_RECT, Size(11, 11));
    medianBlur(diff, diff, 5);          // median filter
    //imshow("medianBlur", diff);
    dilate(diff, diff, element);
    //blur(diff, diff, Size(10, 10));   // mean (box) filter
    //imshow("dilate", diff);
    // Find and draw contours.
    vector<vector<Point>> contours;
    vector<Vec4i> hierarchy;
    findContours(diff, contours, hierarchy, CV_RETR_EXTERNAL, CHAIN_APPROX_NONE);   // external contours only
    vector<Rect> boundRect(contours.size());   // bounding rectangle of each contour
    //drawContours(img2, contours, -1, Scalar(0, 0, 255), 1, 8);   // draw the contours
    // Compute the upright bounding rectangle of each contour.
    int x0 = 0, y0 = 0, w0 = 0, h0 = 0;
    double Area = 0, AreaAll = 0;
    for (size_t i = 0; i < contours.size(); i++)
    {
        boundRect[i] = boundingRect(contours[i]);   // bounding rectangle of the i-th contour
        x0 = boundRect[i].x;                        // x of the top-left corner
        y0 = boundRect[i].y;                        // y of the top-left corner
        w0 = boundRect[i].width;                    // width of the rectangle
        h0 = boundRect[i].height;                   // height of the rectangle
        // Accumulate the contour areas.
        Area = contourArea(contours[i]);            // area of the i-th contour
        AreaAll = Area + AreaAll;
        // Keep only large rectangles.
        if (w0 > 140 && h0 > 140)
            rectangle(result, Point(x0, y0), Point(x0 + w0, y0 + h0), Scalar(0, 255, 0), 2, 8);   // draw the i-th bounding rectangle
        // Text overlay when the accumulated area suggests the view is blocked.
        Point org(10, 35);
        if (i >= 1 && AreaAll >= 19600)
            putText(result, "Is Blocked ", org, CV_FONT_HERSHEY_SIMPLEX, 0.8f, Scalar(0, 255, 0), 2);
    }
    return result;
}
int main()
{
    VideoCapture cap;
    cap.open(0);
    if (!cap.isOpened())                        // check that the camera opened
        return -1;
    double fps = cap.get(CV_CAP_PROP_FPS);      // video frame rate (often 0 for live cameras)
    double pauseTime = 1000 / fps;              // delay between two frames (unused below)
    namedWindow("Cap");
    setMouseCallback("Cap", onMouse);
    Mat frame;
    Mat background;
    Mat result;
    int count = 0;
    while (1)
    {
        if (!leftButtonDownFlag)                // while the left button is up keep playing; pause while it is held
        {
            cap >> image;
        }
        if (waitKey(50) == 27)                  // Esc quits playback
        {
            break;
        }
        if (image.empty())
            continue;                           // nothing to show yet
        if (originalPoint != processPoint && !leftButtonDownFlag)
        {
            rectangle(image, originalPoint, processPoint, Scalar(255, 0, 0), 2);
        }
        imshow("Cap", image);
        //background = image(Rect(originalPoint, processPoint));
        cap >> frame;
        if (!frame.empty())
        {
            count++;
            if (count == 1)
                background = frame.clone();     // use the first frame as the background frame
            //imshow("video", frame);
            result = MoveDetect(background, frame);
            imshow("result", result);
            if (waitKey(50) == 27)
                break;
        }
        else
            continue;
    }
    cap.release();
    return 0;
}
#include "opencv2/opencv.hpp"
#include<iostream>
using namespace std;
using namespace cv;
Mat frame;
Mat imageCopy;                   // copy of the original frame used while drawing the rectangle
bool leftButtonDownFlag = false; // flag: playback is paused while the left mouse button is held down
Point originalPoint;             // rectangle start point
Point processPoint;              // rectangle end point
// Mouse callback
void onMouse(int event, int x, int y, int flags, void *ustc)
{
    if (event == CV_EVENT_LBUTTONDOWN)
    {
        leftButtonDownFlag = true;          // set the pause flag
        originalPoint = Point(x, y);        // the rectangle starts where the button went down
        processPoint = originalPoint;
    }
    if (event == CV_EVENT_MOUSEMOVE && leftButtonDownFlag)
    {
        imageCopy = frame.clone();
        processPoint = Point(x, y);
        //if (originalPoint != processPoint)
        //{
        //    // Draw the rectangle on the copied image.
        //    rectangle(imageCopy, originalPoint, processPoint, Scalar(255, 0, 0), 2);
        //}
        imshow("Cap", imageCopy);
    }
    if (event == CV_EVENT_LBUTTONUP)
    {
        leftButtonDownFlag = false;
        Mat rectImage = frame(Rect(originalPoint, processPoint));   // show the selected sub-image
        imshow("Sub Image", rectImage);
    }
}
Mat MoveDetect(Mat background, Mat img)
{
    // Convert background and img to grayscale.
    Mat result = img.clone();
    Mat gray1, gray2;
    cvtColor(background, gray1, CV_BGR2GRAY);
    cvtColor(img, gray2, CV_BGR2GRAY);
    // Canny edge detection (disabled).
    //Canny(background, background, 0, 30, 3);
    // Difference background and img, then threshold the difference image diff.
    Mat diff;
    absdiff(gray1, gray2, diff);
    //imshow("absdiss", diff);
    threshold(diff, diff, 50, 255, CV_THRESH_BINARY);
    //imshow("threshold", diff);
    // Erosion/dilation to remove noise (disabled).
    /*
    Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
    Mat element2 = getStructuringElement(MORPH_RECT, Size(15, 15));
    erode(diff, diff, element);
    //imshow("erode", diff);
    dilate(diff, diff, element2);
    //imshow("dilate", diff);
    */
    // After binarization, apply median blur + dilation instead.
    Mat element = getStructuringElement(MORPH_RECT, Size(11, 11));
    medianBlur(diff, diff, 5);          // median filter
    //imshow("medianBlur", diff);
    dilate(diff, diff, element);
    //blur(diff, diff, Size(10, 10));   // mean (box) filter
    //imshow("dilate", diff);
    // Find and draw contours.
    vector<vector<Point>> contours;
    vector<Vec4i> hierarchy;
    findContours(diff, contours, hierarchy, CV_RETR_EXTERNAL, CHAIN_APPROX_NONE);   // external contours only
    vector<Rect> boundRect(contours.size());   // bounding rectangle of each contour
    //drawContours(img2, contours, -1, Scalar(0, 0, 255), 1, 8);   // draw the contours
    // Compute the upright bounding rectangle of each contour.
    int x0 = 0, y0 = 0, w0 = 0, h0 = 0;
    double Area = 0, AreaAll = 0;
    for (size_t i = 0; i < contours.size(); i++)
    {
        boundRect[i] = boundingRect(contours[i]);   // bounding rectangle of the i-th contour
        x0 = boundRect[i].x;                        // x of the top-left corner
        y0 = boundRect[i].y;                        // y of the top-left corner
        w0 = boundRect[i].width;                    // width of the rectangle
        h0 = boundRect[i].height;                   // height of the rectangle
        // Accumulate the contour areas.
        Area = contourArea(contours[i]);            // area of the i-th contour
        AreaAll = Area + AreaAll;
        // Keep only large rectangles.
        if (w0 > 140 && h0 > 140)
            rectangle(result, Point(x0, y0), Point(x0 + w0, y0 + h0), Scalar(0, 255, 0), 2, 8);   // draw the i-th bounding rectangle
        // Text overlay when the accumulated area suggests the view is blocked.
        Point org(10, 35);
        if (i >= 1 && AreaAll >= 19600)
            putText(result, "Is Blocked ", org, CV_FONT_HERSHEY_SIMPLEX, 0.8f, Scalar(0, 255, 0), 2);
    }
    return result;
}
int main()
{
    VideoCapture cap;
    cap.open(0);
    if (!cap.isOpened())                        // check that the camera opened
        return -1;
    double fps = cap.get(CV_CAP_PROP_FPS);      // video frame rate (often 0 for live cameras)
    double pauseTime = 1000 / fps;              // delay between two frames (unused below)
    namedWindow("Cap");
    setMouseCallback("Cap", onMouse);
    Mat background;
    Mat result;
    int count = 0;
    while (1)
    {
        cap >> frame;
        //if (!leftButtonDownFlag)              // while the left button is up keep playing; pause while it is held
        //{
        //    cap >> image;
        //}
        if (waitKey(50) == 27)                  // Esc quits playback
        {
            break;
        }
        if (frame.empty())
            continue;                           // nothing to show yet
        if (originalPoint != processPoint && !leftButtonDownFlag)
        {
            rectangle(frame, originalPoint, processPoint, Scalar(255, 0, 0), 2);
            Mat rectImage = frame(Rect(originalPoint, processPoint));   // show the selected sub-image
            imshow("Sub Image", rectImage);
        }
        imshow("Cap", frame);
        /*
        for (;;)
        {
            cap >> frame;
            rectangle(image, originalPoint, processPoint, Scalar(255, 0, 0), 2);
            imshow("Cap", image);
            //background = image(Rect(originalPoint, processPoint));
        }
        */
        //background = image(Rect(originalPoint, processPoint));
        //if (!frame.empty())
        //{
        //    count++;
        //    if (count == 1)
        //        background = frame.clone();   // use the first frame as the background frame
        //    background = image(Rect(originalPoint, processPoint));
        //    //imshow("video", frame);
        //    Mat frame1 = frame(Rect(originalPoint, processPoint));
        //    result = MoveDetect(background, frame1);
        //    imshow("result", result);
        //    if (waitKey(50) == 27)
        //        break;
        //}
        //else
        //    continue;
    }
    cap.release();
    return 0;
}
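// --------------------------------------------------------------------------------------------
// A minimal sketch (my reconstruction, not the original author's final code) of how the
// commented-out detection block in main() above could be re-enabled so that MoveDetect()
// runs only on the mouse-selected ROI. It relies on the globals frame, originalPoint,
// processPoint and leftButtonDownFlag declared earlier in this file.
// --------------------------------------------------------------------------------------------
void detectInSelectedROI(Mat &background, int &count)
{
    if (frame.empty() || leftButtonDownFlag || originalPoint == processPoint)
        return;                                       // need a finished selection and a valid frame
    Rect roi(originalPoint, processPoint);            // Rect(Point, Point) normalizes the two corners
    roi &= Rect(0, 0, frame.cols, frame.rows);        // clamp the selection to the frame bounds
    if (roi.area() <= 0)
        return;
    Mat current = frame(roi);
    if (++count == 1)
        background = current.clone();                 // the first ROI frame becomes the background
    Mat result = MoveDetect(background, current);
    imshow("result", result);
}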