KNN算法及Python、C++实现

K最近邻(K-nearest neighbors,KNN)是一种基本的监督学习算法,用于分类和回归问题。KNN的基本思想是:给定一个新样本,通过测量其与训练数据集中已知样本的距离,来找出K个最接近(距离最近)的训练样本,然后利用这K个样本的类别信息进行预测。

算法步骤如下:

  1. 选择K值:确定K的值,即要考虑多少个最近邻样本。

  2. 计算距离:对于新样本,计算它与训练集中所有样本的距离,常用的距离度量包括欧氏距离、曼哈顿距离、余弦距离等。

  3. 找出最近的K个样本:根据距离找出距离新样本最近的K个训练样本。

  4. 投票/平均:对于分类问题,统计K个样本中每个类别出现的次数,将出现次数最多的类别作为新样本的预测类别。对于回归问题,取K个样本的平均值作为新样本的预测值。

Python代码(Iris数据集)

import operator
import pandas as pd
import numpy as np
import random

# Load the Iris dataset; the CSV is expected to have columns:
# Id, 4 feature columns, Species.
data = pd.read_csv('./Iris.csv').values  # .values already yields an ndarray; the old np.array(data) was redundant
data_1 = data[:, 1:-1]  # feature columns only (drop Id and Species)
len_x = len(data_1[0])  # number of features per sample
len_y = len(data)       # total number of samples
len_train = 100         # size of the training split

#将数据集分为训练集和验证集
def train_valid_split(data,  len_train, len_x, len_y):
    len_test = len_y - len_train
    train_set = np.zeros([len_train, len_x])
    test_set = np.zeros([len_test, len_x])
    mark = np.zeros([len_y,1])
    answer = np.zeros([len_test, 1])
    label = np.zeros([len_train,1])
    m = 0
    for i in range(len_train):
        x = random.randint(0, 149)
        train_set[i] = data[x]
        mark[x] = x
        label[i] = x
    for j in range(len_y):
        if mark[j] == 0 and m < len_test:
            test_set[m] = data[j]
            answer[m] = j
            m += 1
    return np.array(train_set), np.array(test_set), label, answer

# Map species name (column 5 of the raw data) to the numeric class code
# used throughout this script.
_SPECIES_CODE = {'Iris-setosa': 1, 'Iris-versicolor': 2, 'Iris-virginica': 3}


def Distance(train_set, test_set_i, k, label, data):
    """Predict the class code (1/2/3) of one test sample by majority vote
    among its k nearest training samples (Euclidean distance).

    `label` maps training-set rows back to row indices of `data`, whose
    column 5 holds the species string.

    Fixes over the original version:
      - removed the dead local list that shadowed the function name;
      - an unknown species now raises KeyError instead of silently
        reusing the previous loop iteration's `Labels` value;
      - broadcasting replaces the unnecessary np.tile copy.
    """
    diff = train_set - test_set_i           # broadcast test sample against every training row
    dist = np.sqrt((diff ** 2).sum(axis=1))
    order = dist.argsort()                  # training rows sorted by increasing distance

    votes = {}
    for i in range(k):
        idx = int(label[order[i], 0])       # original data row of this neighbour
        code = _SPECIES_CODE[data[idx, 5]]
        votes[code] = votes.get(code, 0) + 1

    # Class code with the most votes among the k neighbours.
    return max(votes.items(), key=operator.itemgetter(1))[0]



# Evaluate KNN accuracy on the held-out split and print one line per test
# sample: the features, the prediction, and the ground truth.
train_set, test_set, label, answer = train_valid_split(data_1, len_train, len_x, len_y)
k = 7
species_code = {'Iris-setosa': 1, 'Iris-versicolor': 2, 'Iris-virginica': 3}
correct = 0  # renamed from `sum`, which shadowed the builtin
for i in range(len_y - len_train):
    # Dict lookup raises KeyError on an unknown species instead of silently
    # reusing a stale value, as the old chained ifs did.
    truth = species_code[data[int(answer[i, 0]), 5]]
    pred = Distance(train_set, test_set[i], k, label, data)  # predict once, reuse below
    if pred == truth:
        correct += 1
    print(f'第{i+1}个训练数据为{test_set[i]},标签为{pred},正确答案是{truth}')
print(f'正确率为{(correct/(len_y - len_train))*100}%')

C++代码(Iris数据集)

#include <iostream> 
#include <string>
#include <fstream>
#include <iostream>
#include <sstream>
#include <vector>
#include <cstring>
#include <algorithm>
#include <string>
#include <math.h>
#include <stdlib.h>

using namespace std;

// One Iris sample: the CSV row id, its four numeric measurements, and
// the species label string (e.g. "Iris-setosa").
struct Flower {
	int id;               // Id column from the CSV
	double character[4];  // the four feature columns
	string labels;        // species name from the last column
};

//训练集与测试集的随机分割
void GenerateTests(int m, vector<struct Flower> FlowerVector, vector<struct Flower> &random_tests,vector<struct Flower> &random_trains)
{
	vector<struct Flower> tmp(FlowerVector);

	int random_index;
	int n = FlowerVector.size();
	int mark[150]={0};
	int j = 0;
	for(int i=0;i<m;i++)
	{
		random_index = rand() % 150;
		random_tests.push_back(FlowerVector.at(random_index));
		mark[random_index] = random_index;
	}
	
	for(int i=0;i<150;i++)
	{
		if(mark[i] == 0 && j<100)
		{
			random_trains.push_back(FlowerVector.at(i));
			j++;
		}
	}
}

//计算欧式距离
double GetDistance(Flower p0, Flower p1)
{
	double sum = 0;
	for(int i=0;i<4;i++)
	{
		sum += (p0.character[i]-p1.character[i])*(p0.character[i]-p1.character[i]);
	}
	sum = sqrt(sum);
	return sum;
}

void Knn(vector<struct Flower> random_tests, vector<struct Flower> random_trains, int k )
{
	double temp;
	int a=0,b=0,c=0;
	for(int i=0;i<random_tests.size();i++)
	{
		double distance[random_trains.size()][2] = {0};
		for(int j=0;j<random_trains.size();j++)
		{
			distance[j][0] = j;
			distance[j][1] = GetDistance(random_tests[i],random_trains[j]);
			//cout<<distance[j][1]<<endl;
		}

		for(int j=1;j<random_trains.size();j++)
			for(int l=0;l<random_trains.size()-j;l++)
			{
				if(distance[l][1]>distance[l+1][1])
				{
					temp = distance[l][1];
					distance[l][1] = distance[l+1][1];
					distance[l+1][1] = temp;

					temp = distance[l][0];
					distance[l][0] =distance[l+1][0];
					distance[l+1][0] = temp;
				}
			}
	
		for(int j=0;j<k;j++)
		{

			if(random_trains[distance[j][0]].labels == "Iris-setosa")
				a++;
			if(random_trains[distance[j][0]].labels == "Iris-versicolor")
				b++;
			if(random_trains[distance[j][0]].labels == "Iris-virginica")
				c++;
			
		}
		if(a>=b && a>=c)
		{
			cout<<i+1<<','<<random_tests[i].labels<<','<<'1'<<endl;
		}
		if(b>=a && b>=c)
		{
			cout<<i+1<<','<<random_tests[i].labels<<','<<'2'<<endl;
		}
		if(c>=b && c>=a)
		{
			cout<<i+1<<','<<random_tests[i].labels<<','<<'3'<<endl;
		}
		
		a=0,b=0,c=0;
	}
}


int main()
{
	ifstream infile("Iris.csv", ios::in);
	string line;
	vector<struct Flower> FlowerVector;
	getline(infile, line);
	while (getline(infile, line))
	{
		stringstream ss(line);
		string str;
		Flower flower;

		getline(ss, str, ',');
		flower.id = stoi(str);
		getline(ss, str, ',');
		flower.character[0] = stod(str);
		getline(ss, str, ',');
		flower.character[1] = stod(str);
		getline(ss, str, ',');
		flower.character[2] = stod(str);
		getline(ss, str, ',');
		flower.character[3] = stold(str);
		getline(ss, str, ',');
		flower.labels = str;
		FlowerVector.push_back(flower);
	}
	int x = FlowerVector.size();

	int m=50;
	int k=7;
	vector<struct Flower> random_tests;
	vector<struct Flower> random_trains;
	GenerateTests(m,FlowerVector, random_tests, random_trains);
	Knn(random_tests, random_trains, k );
	return 0;
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值