Implementation of the KNN Algorithm


Knn.h

#pragma once

class Knn
{
private:
 double** trainingDataset;  // m rows of n values; the last column of each row is the class label
 double* arithmeticMean;    // per-feature mean, used for z-score normalization
 double* standardDeviation; // per-feature standard deviation
 int m, n;                  // m = number of training samples, n = number of columns (features + label)

 void RescaleDistance(double* row);
 void RescaleTrainingDataset();
 void ComputeArithmeticMean();
 void ComputeStandardDeviation();

 double Distance(double* x, double* y);
public:
 Knn(double** trainingDataset, int m, int n);
 ~Knn();
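 // classify `test` by majority vote among its k nearest neighbors;
 // also writes the predicted label into test[n-1]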
 double Vote(double* test, int k);
};

 

Knn.cpp

 

#include "Knn.h"
#include <cmath>
#include <map>

using namespace std;

Knn::Knn(double** trainingDataset, int m, int n)
{
 this->trainingDataset = trainingDataset;
 this->m = m;
 this->n = n;
 ComputeArithmeticMean();
 ComputeStandardDeviation();
 RescaleTrainingDataset();
}

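// Compute the mean of every feature column; the last column (the label) is skipped.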
void Knn::ComputeArithmeticMean()
{
 arithmeticMean = new double[n - 1];

 double sum;

 for(int i = 0; i < n - 1; i++)
 {
  sum = 0;
  for(int j = 0; j < m; j++)
  {
   sum += trainingDataset[j][i];
  }

  arithmeticMean[i] = sum / m; // average over the m training samples
 }
}

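// Compute the standard deviation of every feature column.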
void Knn::ComputeStandardDeviation()
{
 standardDeviation = new double[n - 1];

 double sum, temp;

 for(int i = 0; i < n - 1; i++)
 {
  sum = 0;
  for(int j = 0; j < m; j++)
  {
   temp = trainingDataset[j][i] - arithmeticMean[i];
   sum += temp * temp;
  }

  standardDeviation[i] = sqrt(sum / m); // population standard deviation over the m samples
 }
}

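// Z-score a single row in place: (value - mean) / standard deviation for each feature.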
void Knn::RescaleDistance(double* row)
{
 for(int i = 0; i < n - 1; i++)
 {
  row[i] = (row[i] - arithmeticMean[i]) / standardDeviation[i];
 }
}

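// Normalize every training sample with the statistics computed above.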
void Knn::RescaleTrainingDataset()
{
 for(int i = 0; i < m; i++)
 {
  RescaleDistance(trainingDataset[i]);
 }
}

Knn::~Knn()
{
 delete[] arithmeticMean;
 delete[] standardDeviation;
}

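// Euclidean distance between two rows, computed over the feature columns only.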
double Knn::Distance(double* x, double* y)
{
 double sum = 0, temp;
 for(int i = 0; i < n - 1; i++)
 {
  temp = (x[i] - y[i]);
  sum += temp * temp;
 }

 return sqrt(sum);
}

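// Classify `test`: normalize it, find the k training samples at the smallest
// Euclidean distance, and return the label that occurs most often among them.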
double Knn::Vote(double* test, int k)
{
 RescaleDistance(test);

 double distance;

 // the k smallest distances seen so far: training-row index -> distance
 map<int, double>::iterator max;

 map<int, double> mins;

 for(int i = 0; i < m; i++)
 {
  distance = Distance(test, trainingDataset[i]);
  if((int)mins.size() < k)
   mins.insert(map<int, double>::value_type(i, distance));
  else
  {
   // find the farthest of the current k neighbors and replace it if this sample is closer
   max = mins.begin();
   for(map<int, double>::iterator it = mins.begin(); it != mins.end(); it++)
   {
    if(it->second > max->second)
     max = it;
   }
   if(distance < max->second)
   {
    mins.erase(max);
    mins.insert(map<int, double>::value_type(i, distance));
   }
  }
 }

 // tally the class labels (last column) of the k nearest neighbors
 map<double, int> votes;
 double temp;

 for(map<int, double>::iterator it = mins.begin(); it != mins.end(); it++)
 {
  temp = trainingDataset[it->first][n-1];
  map<double, int>::iterator voteIt = votes.find(temp);
  if(voteIt != votes.end())
   voteIt->second++;
  else
   votes.insert(map<double, int>::value_type(temp, 1));
 }

 // return the most frequent label
 map<double, int>::iterator maxVote = votes.begin();

 for(map<double, int>::iterator it = votes.begin(); it != votes.end(); it++)
 {
  if(it->second > maxVote->second)
   maxVote = it;
 }

 test[n-1] = maxVote->first;

 return maxVote->first;
}

 

main.cpp

 

#include <iostream>
#include "Knn.h"

using namespace std;

int main()
{
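 // training set: 14 samples, 4 discrete-valued features, last column = class label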
 double** train = new double* [14];
 for(int i = 0; i < 14; i ++)
  train[i] = new double[5];
 double trainArray[14][5] =
 {
  {0, 0, 0, 0, 0},
  {0, 0, 0, 1, 0},
  {1, 0, 0, 0, 1},
  {2, 1, 0, 0, 1},
  {2, 2, 1, 0, 1},
  {2, 2, 1, 1, 0},
  {1, 2, 1, 1, 1},
  {0, 1, 0, 0, 0},
  {0, 2, 1, 0, 1},
  {2, 1, 1, 0, 1},
  {0, 1, 1, 1, 1},
  {1, 1, 0, 1, 1},
  {1, 0, 1, 0, 1},
  {2, 1, 0, 1, 0}
 };

 for(int i = 0; i < 14; i ++)
  for(int j = 0; j < 5; j ++)
   train[i][j] = trainArray[i][j];

 Knn knn(train, 14, 5);

 double test[5] = {2, 2, 0, 1, 0};
 // classify the test sample; k = 5 is an arbitrary (odd) neighborhood size chosen for this example
 cout << knn.Vote(test, 5) << endl;

 for(int i = 0; i < 14; i ++)
  delete[] train[i];
 delete[] train;

 return 0;
}