KNN（k 近邻算法）的特点：
- 思想极其简单
- 用到的数学知识很少
- 效果好
- 可以解释机器学习算法使用过程中的很多细节
- 能更完整地刻画机器学习应用的流程
import numpy as np
import matplotlib.pyplot as plt
# Toy training set: five class-0 samples followed by five class-1 samples,
# each sample a 2-D feature vector paired with its label.
_labeled_points = [
    ((3.393533211, 2.331273381), 0),
    ((3.110073483, 1.781539638), 0),
    ((1.343808831, 3.368360954), 0),
    ((3.582294042, 4.679779110), 0),
    ((2.280362439, 2.866990263), 0),
    ((7.423436942, 4.696522875), 1),
    ((5.745015997, 3.533989803), 1),
    ((9.172168622, 2.511101045), 1),
    ((7.792783487, 3.424088941), 1),
    ((7.9939820917, 0.791637231), 1),
]
raw_data_x = [list(point) for point, _ in _labeled_points]
raw_data_y = [label for _, label in _labeled_points]
X_train = np.array(raw_data_x)
y_train = np.array(raw_data_y)
X_train
array([[3.39353321, 2.33127338],
[3.11007348, 1.78153964],
[1.34380883, 3.36836095],
[3.58229404, 4.67977911],
[2.28036244, 2.86699026],
[7.42343694, 4.69652288],
[5.745016 , 3.5339898 ],
[9.17216862, 2.51110105],
[7.79278349, 3.42408894],
[7.99398209, 0.79163723]])
y_train
array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
# Scatter plot of the training samples, one colour per class
# (green = class 0, red = class 1).
for label, colour in ((0, 'g'), (1, 'r')):
    mask = y_train == label
    plt.scatter(X_train[mask, 0], X_train[mask, 1], color=colour)
plt.show()
# Query point whose class we want to predict.
x = np.array([8.093607318, 3.365731514])

# Re-plot the training samples by class, plus the query point in blue.
for label, colour in ((0, 'g'), (1, 'r')):
    mask = y_train == label
    plt.scatter(X_train[mask, 0], X_train[mask, 1], color=colour)
plt.scatter(x[0], x[1], color='b')
plt.show()
KNN 的预测过程如下：先计算待预测点到每个训练样本的距离，再取最近的 k 个样本投票。
from math import sqrt

# Euclidean distance from the query point ``x`` to every training sample.
# The original for/append loop had lost its body indentation in this
# transcript; a comprehension is both valid and more idiomatic.
# (Vectorized equivalent: np.sqrt(((X_train - x) ** 2).sum(axis=1)).)
distances = [sqrt(np.sum((sample - x) ** 2)) for sample in X_train]
distances
[4.812566907609877,
5.229270827235305,
6.749798999160064,
4.698794372896864,
5.83460014556857,
1.4900114024329525,
2.354610805397302,
1.3761132675144652,
0.3064319934073084,
2.5760214602150753]
np.argsort(distances)
array([8, 7, 5, 6, 9, 3, 0, 1, 4, 2], dtype=int64)
# Training-sample indices sorted by ascending distance to the query point.
nearest = np.argsort(distances)

# Labels of the k closest training samples (fancy indexing instead of a
# per-index comprehension; elements are the same numpy integer scalars).
k = 6
topK_y = list(y_train[nearest[:k]])
topK_y
[1, 1, 1, 1, 1, 0]
from collections import Counter

# Majority vote among the k nearest neighbours.
# most_common(1) returns [(label, count)], so [0][0] is the winning label.
# (The interleaved REPL echo/output lines from the transcript were no-op
# expressions and have been removed.)
votes = Counter(topK_y)
predict_y = votes.most_common(1)[0][0]