OpenCV 3.0 Machine Learning: Using SVM (Two Classes, Non-Linearly Separable)

The sample code I found earlier was all written against the 2.x API, and much of it no longer runs under OpenCV 3.0. I have updated it so that everything below compiles and runs; the original 2.x calls are kept as comments next to their 3.0 replacements, so you can compare the two APIs line by line.
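Before the full program, here is a minimal sketch of my own summarizing how the old 2.x SVM setup maps onto the 3.0 cv::ml interface; samples and labels stand for the CV_32F feature matrix and CV_32S label vector that the listing below builds, and the parameter values are just placeholders.

// OpenCV 2.x style (kept only as comments in the listing below)
// CvSVMParams params;
// params.svm_type    = CvSVM::C_SVC;
// params.kernel_type = CvSVM::LINEAR;
// params.C           = 0.1;
// CvSVM svm;
// svm.train(samples, labels, cv::Mat(), cv::Mat(), params);

// OpenCV 3.0 equivalent
cv::Ptr<cv::ml::SVM> svm = cv::ml::SVM::create();
svm->setType(cv::ml::SVM::C_SVC);
svm->setKernel(cv::ml::SVM::LINEAR);
svm->setC(0.1);
svm->train(samples, cv::ml::ROW_SAMPLE, labels);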
/*******************************
** Author: 周小小
** Description: OpenCV 3.0 SVM demo on two classes that are not linearly separable
*******************************/
#include <iostream>  
#include <opencv2/core/core.hpp>  
#include <opencv2/highgui/highgui.hpp>  
#include <opencv2/ml/ml.hpp>  
#include <opencv2/opencv.hpp>
#define NTRAINING_SAMPLES   100         // Number of training samples per class  
#define FRAC_LINEAR_SEP     0.9f        // Fraction of samples which compose the linear separable part  

using namespace cv;
using namespace std;


int main()
{
	// Data for visual representation  
	const int WIDTH = 512, HEIGHT = 512;
	Mat I = Mat::zeros(HEIGHT, WIDTH, CV_8UC3);

	//--------------------- 1. Set up training data randomly --------------------------------------- 
	Mat trainData(2 * NTRAINING_SAMPLES, 2, CV_32FC1); // 32-bit float feature matrix, one sample per row
	Mat labels(2 * NTRAINING_SAMPLES, 1, CV_32SC1);    // 32-bit signed integer class labels
	// trainData holds the samples and labels holds the response (class) of each sample.
	// The 3.0 trainer only accepts CV_32F feature matrices and CV_32S label matrices,
	// which is why the labels (1 and 2 here) are stored as signed integers.
	RNG rng(100); // Random value generation class  

	// Set up the linearly separable part of the training data  
	int nLinearSamples = (int)(FRAC_LINEAR_SEP * NTRAINING_SAMPLES);

	// Generate random points for the class 1  
	Mat trainClass = trainData.rowRange(0, nLinearSamples);
	// The x coordinate of the points is in [1, 0.4*WIDTH)
	Mat c = trainClass.colRange(0, 1);
	rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(0.4 * WIDTH));
	// The y coordinate of the points is in [1, HEIGHT)
	c = trainClass.colRange(1, 2);
	rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));

	// Generate random points for the class 2  
	trainClass = trainData.rowRange(2 * NTRAINING_SAMPLES - nLinearSamples, 2 * NTRAINING_SAMPLES);
	// The x coordinate of the points is in [0.6*WIDTH, WIDTH)
	c = trainClass.colRange(0, 1);
	rng.fill(c, RNG::UNIFORM, Scalar(0.6*WIDTH), Scalar(WIDTH));
	// The y coordinate of the points is in [1, HEIGHT)
	c = trainClass.colRange(1, 2);
	rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));

	//------------------ Set up the non-linearly separable part of the training data ---------------  

	// Generate random points for the classes 1 and 2  
	trainClass = trainData.rowRange(nLinearSamples, 2 * NTRAINING_SAMPLES - nLinearSamples);
	// The x coordinate of the points is in [0.4*WIDTH, 0.6*WIDTH), so the two classes overlap here
	c = trainClass.colRange(0, 1);
	rng.fill(c, RNG::UNIFORM, Scalar(0.4*WIDTH), Scalar(0.6*WIDTH));
	// The y coordinate of the points is in [1, HEIGHT)
	c = trainClass.colRange(1, 2);
	rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));

	//------------------------- Set up the labels for the classes ---------------------------------  
	labels.rowRange(0, NTRAINING_SAMPLES).setTo(1);  // Class 1  
	labels.rowRange(NTRAINING_SAMPLES, 2 * NTRAINING_SAMPLES).setTo(2);  // Class 2  

	//------------------------ 2. Set up the support vector machines parameters --------------------  
	cv::Ptr<cv::ml::SVM> svm = cv::ml::SVM::create();
	//CvSVMParams params;
	svm->setType(cv::ml::SVM::Types::C_SVC); // SVM type: C-Support Vector Classification
	//params.svm_type = SVM::C_SVC;
	//params.C = 0.1;
	svm->setC(0.1);
	//params.kernel_type = SVM::LINEAR;
	svm->setKernel(cv::ml::SVM::KernelTypes::LINEAR); // kernel type: linear
	//params.term_crit = TermCriteria(CV_TERMCRIT_ITER, (int)1e7, 1e-6);
	svm->setTermCriteria(cv::TermCriteria(cv::TermCriteria::MAX_ITER, (int)1e7, 1e-6)); // termination criteria of the training algorithm
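	// With C_SVC and a small C (0.1) the margin is "soft": the overlapping samples generated
	// in the middle band [0.4*WIDTH, 0.6*WIDTH) may end up misclassified at only a small cost.
	// A larger C would penalize training errors more heavily and shrink the margin.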

	//------------------------ 3. Train the svm ----------------------------------------------------  
	cout << "Starting training process" << endl;
	/*for (int i = 0; i<trainData.rows; i++) {
		for (int j = 0; j<trainData.cols; j++) {
			cout << (int)trainData.at<float>(i, j) << endl;
		}

	}*/
	//CvSVM svm;
	//svm.train(trainData, labels, Mat(), Mat(), params);
	svm->train(trainData, cv::ml::SampleTypes::ROW_SAMPLE, labels);
	cout << "Finished training process" << endl;

	//------------------------ 4. Show the decision regions ----------------------------------------  
	Vec3b green(0, 100, 0), blue(100, 0, 0);
	for (int i = 0; i < I.rows; ++i)
		for (int j = 0; j < I.cols; ++j)
		{
			// Treat every pixel as a sample (x, y) = (column j, row i), matching the training layout
			Mat sampleMat = (Mat_<float>(1, 2) << j, i);
			float response = svm->predict(sampleMat);

			if (response == 1)    I.at<Vec3b>(i, j) = green;
			else if (response == 2)    I.at<Vec3b>(i, j) = blue;
		}

	//----------------------- 5. Show the training data --------------------------------------------  
	int thick = -1;
	int lineType = 8;
	float px, py;
	// Class 1  
	for (int i = 0; i < NTRAINING_SAMPLES; ++i)
	{
		px = trainData.at<float>(i, 0);
		py = trainData.at<float>(i, 1);
		circle(I, Point((int)px, (int)py), 3, Scalar(0, 255, 0), thick, lineType);
	}
	// Class 2  
	for (int i = NTRAINING_SAMPLES; i <2 * NTRAINING_SAMPLES; ++i)
	{
		px = trainData.at<float>(i, 0);
		py = trainData.at<float>(i, 1);
		circle(I, Point((int)px, (int)py), 3, Scalar(255, 0, 0), thick, lineType);
	}

	//------------------------- 6. Show support vectors --------------------------------------------  
	thick = 2;
	lineType = 8;
	//int x = svm.get_support_vector_count();
	// getSupportVectors() returns the support vectors; for a LINEAR kernel OpenCV 3.x stores them
	// in compressed form, so usually only a single weighted vector is drawn here.
	cv::Mat sv = svm->getSupportVectors();
	for (int i = 0; i <sv.rows; ++i)
	{
		const float* v = sv.ptr<float>(i);
		circle(I, Point((int)v[0], (int)v[1]), 6, Scalar(128, 128, 128), thick, lineType);
	}

	imwrite("result.png", I);                      // save the Image  
	imshow("Two classes of linear nonseparable problems", I); // show it to the user  
	waitKey(0);
}
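
Once training has finished you may want to reuse the model without retraining it. The sketch below is my own addition, not part of the original 2.x sample: it shows one way to serialize the trained cv::ml::SVM to disk and load it back for prediction. The file name svm_model.xml is only a placeholder, and depending on your exact 3.x minor version the loader may be StatModel::load<SVM> (3.0) or the static SVM::load added in later releases.

#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/ml/ml.hpp>

// Save a trained SVM and load it back to classify a single point.
void saveAndReuse(const cv::Ptr<cv::ml::SVM>& svm)
{
	// Serialize the model; XML or YAML format is chosen from the file extension
	svm->save("svm_model.xml");

	// Reload it (OpenCV 3.0 style); later 3.x versions also offer cv::ml::SVM::load("svm_model.xml")
	cv::Ptr<cv::ml::SVM> loaded = cv::ml::StatModel::load<cv::ml::SVM>("svm_model.xml");

	// Classify one (x, y) point; the layout must match training: a 1x2 CV_32F row
	cv::Mat sample = (cv::Mat_<float>(1, 2) << 100.0f, 400.0f);
	float response = loaded->predict(sample);
	std::cout << "Predicted class: " << response << std::endl;
}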
