Machine Learning: Classifying the Wine Data Set with a Normal Bayes Classifier

1. Preparing the data set

I use the Wine Data Set from the UCI Machine Learning Repository.

Download link: http://download.csdn.net/download/tiankong_/10120450

Data description:

The first column is the class label (1, 2, or 3); the remaining 13 columns are the feature attributes: Alcohol, Malic acid, Ash, Alcalinity of ash, Magnesium, Total phenols, Flavanoids, Nonflavanoid phenols, Proanthocyanins, Color intensity, Hue, OD280/OD315 of diluted wines, Proline.
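
Each row of the CSV file therefore contains the class label followed by the 13 feature values, separated by commas. For illustration only, such a row looks like the line below; the 13 feature values are the same ones reused as test1 in the code further down, and the leading 1 is my assumption that this row comes from the class-1 block of the UCI file:

1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065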

2. Code example

#include "opencv2/ml/ml.hpp"
#include "opencv2/core/core.hpp"
#include "opencv2/core/utility.hpp"
#include <stdio.h>
#include <string>
#include <map>
#include <vector>
#include <iostream>

using namespace std;
using namespace cv;
using namespace cv::ml;

static void help()
{
	printf(
		"\nThis sample demonstrates how to use different decision trees and forests including boosting and random trees.\n"
		"Usage:\n\t./tree_engine [-r <response_column>] [-ts type_spec] <csv filename>\n"
		"where -r <response_column> specified the 0-based index of the response (0 by default)\n"
		"-ts specifies the var type spec in the form ord[n1,n2-n3,n4-n5,...]cat[m1-m2,m3,m4-m5,...]\n"
		"<csv filename> is the name of training data file in comma-separated value format\n\n");
}

static void train_and_print_errs(Ptr<StatModel> model, const Ptr<TrainData>& data)
{
	bool ok = model->train(data);
	if (!ok)
	{
		printf("Training failed\n");
	}
	else
	{
		printf("train error: %f\n", model->calcError(data, false, noArray()));
		printf("test error: %f\n\n", model->calcError(data, true, noArray()));
	}
}

int main(int argc, char** argv)
{
	if (argc < 2)
	{
		help();
		return 0;
	}
	const char* filename = 0;
	int response_idx = 0;
	std::string typespec;

	for (int i = 1; i < argc; i++)
	{
		if (strcmp(argv[i], "-r") == 0)
			sscanf(argv[++i], "%d", &response_idx);
		else if (strcmp(argv[i], "-ts") == 0)
			typespec = argv[++i];
		else if (argv[i][0] != '-')
			filename = argv[i];
		else
		{
			printf("Error. Invalid option %s\n", argv[i]);
			help();
			return -1;
		}
	}

	printf("\nReading in %s...\n\n", filename);
	const double train_test_split_ratio = 0.5;
	// load the training data from the CSV file
	Ptr<TrainData> data = TrainData::loadFromCSV(filename, 0, response_idx, response_idx + 1, typespec);
	if (data.empty()) 
	{
		printf("ERROR: File %s can not be read\n", filename);
		return 0;
	}

	data->setTrainTestSplitRatio(train_test_split_ratio);
	// prepare the samples to predict (one row of 13 feature values each)
	float test1[] = { 14.23, 1.71, 2.43, 15.6, 127, 2.8, 3.06, .28, 2.29, 5.64, 1.04, 3.92, 1065 };
	float test2[] = { 12.37, .94, 1.36, 10.6, 88, 1.98, .57, .28, .42, 1.95, 1.05, 1.82, 520 };
	float test3[] = { 12.86, 1.35, 2.32, 18, 122, 1.51, 1.25, .21, .94, 4.1, .76, 1.29, 630 };
	Mat test1Map(1, 13, CV_32FC1, test1);
	Mat test2Map(1, 13, CV_32FC1, test2);
	Mat test3Map(1, 13, CV_32FC1, test3);

	printf("============正太贝叶斯分类器================\n");
	//创建正态贝叶斯分类器
	Ptr<NormalBayesClassifier> bayes = NormalBayesClassifier::create();
	//训练模型
	train_and_print_errs(bayes, data);
	//保存模型
	bayes->save("bayes_result.xml");
	//读取模型,强行使用一下,为了强调这种用法,当然此处完全没必要
	Ptr<NormalBayesClassifier> bayes2 = NormalBayesClassifier::load<NormalBayesClassifier>("bayes_result.xml");
	cout << bayes2->predict(test1Map) << endl;
	cout << bayes2->predict(test2Map) << endl;
	cout << bayes2->predict(test3Map) << endl;
	cout << "============================================" << endl;
	return 0;
}
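
To run the sample, build it against OpenCV's ml module and pass the CSV file on the command line, e.g. ./tree_engine wine.data (here wine.data is just a placeholder for whatever name you saved the data set under). Since the class label is in column 0, the -r option can be left at its default.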


As you can see, the results are not that good.
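
If you want to see not just the winning label but also how strongly each class scores, NormalBayesClassifier additionally provides predictProb(), which returns the per-class outputs alongside the predicted label. A minimal sketch, reusing bayes2 and test1Map from the listing above (note that, depending on the OpenCV version, these scores are not necessarily normalized to sum to 1):

	// inspect the per-class outputs for one test sample
	Mat predictedClass, classScores;
	bayes2->predictProb(test1Map, predictedClass, classScores);
	cout << "predicted class: " << predictedClass << endl;
	cout << "per-class scores: " << classScores << endl;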

