3.机器学习之KNN最优k的选取-学习曲线

KNN最优K值的选取——学习曲线

方法1:网格(暴力)搜索

# 1. 导入所需要的模块和库
import numpy as np 
from sklearn.neighbors import KNeighborsClassifier 
from sklearn.datasets import load_breast_cancer 
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
# 2. Load the breast-cancer dataset and hold out 30% of it as a test set.
data = load_breast_cancer()
X, y = data.data, data.target  # feature matrix and class labels
X_train, X_test, y_train, y_test = train_test_split(
    X, y,
    test_size=0.3,     # 30% of samples go to the test set
    random_state=420,  # fixed seed so the split is reproducible
)

代码的目的是通过循环改变 K-Nearest Neighbors(KNN)算法中的参数 k(即邻居数),并评估不同 k 值下模型在训练集和测试集上的准确率

# 3. 绘制学习曲线
# 设置两个列表保存每个k值下模型在训练集和测试集上得到的评估结果
# 3. Learning curve: for each k in 1..20, refit a KNN classifier and
# record its accuracy on both the training set and the test set.
train_score_list = []
test_score_list = []
for n_neighbors in range(1, 21):
    # Build and train a fresh model with the current number of neighbors
    model = KNeighborsClassifier(n_neighbors=n_neighbors)
    model.fit(X_train, y_train)
    # .score() returns the mean accuracy on the given data/labels
    train_score_list.append(model.score(X_train, y_train))
    test_score_list.append(model.score(X_test, y_test))
print(train_score_list)
[1.0, 0.9723618090452262, 0.9623115577889447, 0.9623115577889447, 0.9547738693467337, 0.949748743718593, 0.9422110552763819, 0.9447236180904522, 0.9422110552763819, 0.9447236180904522, 0.9447236180904522, 0.9447236180904522, 0.9396984924623115, 0.9371859296482412, 0.9371859296482412, 0.9371859296482412, 0.9321608040201005, 0.9346733668341709, 0.9296482412060302, 0.9346733668341709]
print(test_score_list)  # accuracy on the held-out test set for k = 1..20
[0.8888888888888888, 0.8771929824561403, 0.9122807017543859, 0.9064327485380117, 0.9181286549707602, 0.9181286549707602, 0.9298245614035088, 0.935672514619883, 0.9298245614035088, 0.935672514619883, 0.9298245614035088, 0.935672514619883, 0.9239766081871345, 0.9239766081871345, 0.9298245614035088, 0.9298245614035088, 0.935672514619883, 0.935672514619883, 0.9298245614035088, 0.9239766081871345]
# 4. Plot both accuracy curves against k as a line chart.
plt.figure(dpi=200)

# Training vs. test accuracy for each k value (x axis is k = 1..20)
plt.plot(range(1, 21), train_score_list, label='train_score')
plt.plot(range(1, 21), test_score_list, label='test_score')

# Axis labels and legend (fixed typo: y label was 'socre')
plt.xlabel('k_value')
plt.ylabel('score')
plt.legend()
plt.show()

(图片加载失败:此处原为折线图,展示训练集与测试集准确率随 k 值(1~20)变化的学习曲线)

# 5. Best accuracy the model achieved on the test set across all k values
max(test_score_list) 
0.935672514619883
# 6. Find the k value that produced the maximum test accuracy.
# np.argmax returns the 0-based index of the maximum; k starts at 1,
# hence the +1 offset.
best_k = int(np.argmax(test_score_list)) + 1
print(best_k)
8

方法2:交叉验证

import numpy as np 
from sklearn.neighbors import KNeighborsClassifier 
from sklearn.datasets import load_breast_cancer 
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn.model_selection import cross_val_score
# Reload the dataset and recreate the identical 70/30 split
# (same random_state=420 as method 1, so results are comparable).
data = load_breast_cancer()
X, y = data.data, data.target  # features and labels
X_train, X_test, y_train, y_test = train_test_split(
    X, y,
    test_size=0.3,
    random_state=420,
)
# For each k in 1..20, record train/test accuracy plus the mean and
# variance of a 5-fold cross-validation score on the training set.
train_score_list = []
test_score_list = []
score = []  # mean 5-fold CV accuracy per k
var = []    # variance of the 5 fold scores per k
for n_neighbors in range(1, 21):
    # Build and fit a model with the current neighbour count
    model = KNeighborsClassifier(n_neighbors=n_neighbors)
    model.fit(X_train, y_train)
    # Plain hold-out evaluation (same as method 1)
    train_score_list.append(model.score(X_train, y_train))
    test_score_list.append(model.score(X_test, y_test))
    # 5-fold cross-validation, run on the training data only
    cv_scores = cross_val_score(model, X_train, y_train, cv=5)
    score.append(cv_scores.mean())
    var.append(cv_scores.var())
print(score)  # mean 5-fold CV accuracy for k = 1..20
print(var)    # variance of the CV folds for k = 1..20
[0.9220886075949366, 0.9270569620253164, 0.9320569620253165, 0.9295886075949367, 0.934620253164557, 0.929620253164557, 0.9395886075949367, 0.9396202531645569, 0.934620253164557, 0.9396518987341773, 0.9270886075949367, 0.9371202531645568, 0.9270253164556962, 0.9295569620253165, 0.9270569620253164, 0.9270569620253164, 0.9245569620253165, 0.9245569620253165, 0.9245569620253165, 0.9245569620253165]
[0.00021623738182983474, 0.0004824547348181385, 0.0007446082358596371, 0.00030059285370934077, 0.00022031124819740403, 0.0002915137798429741, 0.0004181361160070506, 0.0002241087165518342, 9.531124819740365e-05, 0.00021917160711424402, 0.00022353589168402476, 0.0004488022752763978, 0.0006115946963627635, 0.000622169924691557, 0.0006699547348181394, 0.0006699547348181394, 0.0005177395449447208, 0.0005177395449447208, 0.0005177395449447208, 0.0005177395449447208]
# Plot train, test, and cross-validation accuracy curves against k.
plt.figure(dpi=200)
plt.plot(range(1, 21), train_score_list, label='train_score')
plt.plot(range(1, 21), test_score_list, label='test_score')
plt.plot(range(1, 21), score, color='g', label='cross_score')

# Dashed red band: CV mean +/- 2 * variance for each k.
# NOTE(review): a confidence band is conventionally mean +/- 2*std,
# not 2*variance — kept as-is to preserve the original behavior; confirm intent.
plt.plot(range(1, 21), np.array(score) + np.array(var) * 2, c='red', linestyle='--')
plt.plot(range(1, 21), np.array(score) - np.array(var) * 2, c='red', linestyle='--')

# Axis labels and legend (fixed typo: y label was 'socre')
plt.xlabel('k_value')
plt.ylabel('score')
plt.legend()

(图片加载失败:此处原为折线图,展示训练集、测试集及 5 折交叉验证准确率随 k 值变化的曲线,并带有均值 ± 2×方差的红色虚线区间)

# k value with the best mean CV accuracy (argmax is 0-based, k starts at 1)
np.argmax(score)+1
# Best mean cross-validation accuracy across all k values
max(score)
0.9396518987341773
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值