#include "stdafx.h"
#include "cv.h" // includes OpenCV definitions
#include "highgui.h" // includes highGUI definitions
#include "cvcam.h"
#include <iostream>
#include <stdio.h>// includes C standard input/output definitions
using namespace std;
// various tracking parameters (in seconds)
const double MHI_DURATION = 1;
const double MAX_TIME_DELTA = 0.5;
const double MIN_TIME_DELTA = 0.05;
// number of cyclic frame buffer used for motion detection
// (should, probably, depend on FPS)
const int N = 4;
// ring image buffer (MHI machinery — not used by the code below)
IplImage **buf = 0;
int last = 0;
// temporary images (MHI machinery — not used by the code below)
IplImage *mhi = 0; // MHI
IplImage *orient = 0; // orientation
IplImage *mask = 0; // valid orientation mask
IplImage *segmask = 0; // motion segmentation map
CvMemStorage* storage = 0; // temporary storage for cvFindContours
IplImage* abs_image = 0;      // per-frame thresholded abs-difference
IplImage* add_abs_image = 0;  // accumulated motion mask over recent frames
// History ring of recent difference masks.
// BUG FIX: was abs_images[3], but indices 0..3 (4 elements, matching N)
// are created and read — abs_images[3] was an out-of-bounds access (UB).
IplImage* abs_images[4];
IplImage* grey = 0;     // current frame, greyscale
IplImage* pre_grey = 0; // previous frame, greyscale
IplImage* dst = 0;      // contour rendering target
CvSeq* contour = 0;     // contour list head used/advanced by test()
int test( IplImage* src,IplImage* pre_src );
int _tmain(int argc, _TCHAR* argv[])
{
// Determine the number of available cameras
int numCameras = cvcamGetCamerasCount() ;
cout << "=========================================" << endl ;
cout << "== Located devices: =>" << numCameras << "<=" << endl ;
cout << "=========================================" << endl ;
// Make sure that a camera is attached
if( numCameras == 0 ) {
getchar();
return -1;
}
IplImage* pre_image = 0;
IplImage* image = 0;
int frame_count =0;
CvCapture * capture=cvCaptureFromCAM( CV_CAP_ANY );
if( capture )
{
printf( "=> OK/n");
}
else
{
fprintf(stderr,"ERROR: capture is NULL /n");
getchar();
return -1;
}
cvNamedWindow( "Source", 1 );
cvNamedWindow( "Components", 1 );
for(;;)
{
if( !cvGrabFrame( capture ))
break;
image = cvRetrieveFrame( capture );
if (!pre_image)
{
pre_image = cvCreateImage( cvGetSize(image), 8, 3 );
abs_image = cvCreateImage( cvGetSize(image), 8, 1 );
add_abs_image = cvCreateImage( cvGetSize(image), 8, 1 );
grey = cvCreateImage( cvGetSize(image), 8, 1 );
pre_grey = cvCreateImage( cvGetSize(image), 8, 1 );
dst = cvCreateImage( cvGetSize(image), 8, 3 );
abs_image->origin = image->origin;
add_abs_image->origin = image->origin;
dst->origin = image->origin;
storage = cvCreateMemStorage(0);
abs_images[0] = cvCreateImage( cvGetSize(image), 8, 1 );
abs_images[1] = cvCreateImage( cvGetSize(image), 8, 1 );
abs_images[2] = cvCreateImage( cvGetSize(image), 8, 1 );
abs_images[3] = cvCreateImage( cvGetSize(image), 8, 1 );
cvZero(abs_images[0]);
cvZero(abs_images[1]);
cvZero(abs_images[2]);
cvZero(abs_images[3]);
}
frame_count++;
test(image,pre_image);
cvCopy(abs_images[frame_count%3],abs_image,0);
cvCopy(image,pre_image,0);
if( cvWaitKey(10) >= 0 )
break;
}
cvcamStop();
cvcamExit();
// Release the capture device housekeeping
cvDestroyWindow("src");
cvReleaseCapture(&capture);
return 0;
}
// Frame-differencing motion detector.
// Computes the thresholded absolute difference between the current frame
// (src) and the previous frame (pre_src), accumulates it with the recent
// difference masks stored in the global abs_images[] ring, then finds and
// draws the contours of the per-frame difference. Results are shown in the
// "Source" and "Components" windows (created by the caller).
// Uses/overwrites the globals grey, pre_grey, abs_image, add_abs_image,
// dst, contour and storage; always returns 0.
int test( IplImage* src,IplImage* pre_src )
{
// Greyscale conversions of the two frames to be differenced.
cvCvtColor(src, grey, CV_BGR2GRAY);
cvZero(add_abs_image);
cvCvtColor(pre_src, pre_grey, CV_BGR2GRAY);
// Per-frame motion mask: |grey - pre_grey| binarized at 20.
cvAbsDiff( grey,pre_grey, abs_image );
cvThreshold( abs_image, abs_image, 20, 255, CV_THRESH_BINARY );
// OR-accumulate the history ring into add_abs_image: after each cvAdd the
// re-threshold (any non-zero -> 255) makes the addition act as a union of
// the binary masks, so the exact add/threshold ordering matters.
// NOTE(review): abs_images[] is only zeroed at startup and nothing visibly
// writes into it after test() runs, so these adds appear to contribute
// nothing — confirm the ring is filled elsewhere (the caller's cvCopy
// arguments look reversed).
cvAdd(abs_images[0],abs_image,add_abs_image);
cvThreshold( add_abs_image, add_abs_image, 10, 255, CV_THRESH_BINARY );
cvAdd(abs_images[1],add_abs_image,add_abs_image);
cvThreshold( add_abs_image, add_abs_image, 10, 255, CV_THRESH_BINARY );
cvAdd(abs_images[2],add_abs_image,add_abs_image);
cvThreshold( add_abs_image, add_abs_image, 10, 255, CV_THRESH_BINARY );
cvAdd(abs_images[3],add_abs_image,add_abs_image);
cvThreshold( add_abs_image, add_abs_image, 10, 255, CV_THRESH_BINARY );
//cvWaitKey(0);
// Contours of the per-frame mask; cvFindContours modifies abs_image in
// place, which is acceptable here since it is recomputed every frame.
cvFindContours( abs_image, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
cvZero( dst );
// Fill each detected motion region in white; the loop leaves the global
// `contour` at NULL, ready for the next frame.
for( ; contour != 0; contour = contour->h_next )
{
CvScalar color = CV_RGB( 255, 255, 255 );
/* replace CV_FILLED with 1 to see the outlines */
cvDrawContours( dst, contour, color, color, -1, CV_FILLED, 8 );
}
//cvNamedWindow( "Source", 1 );
cvShowImage( "Source", src );
//cvNamedWindow( "Components", 1 );
cvShowImage( "Components", dst );
//cvWaitKey(0);
return 0;
}