KNN classification and regression from scratch

import numpy as np
from scipy.stats import mode
from sklearn.datasets import load_boston  # removed in scikit-learn 1.2; requires an older version

# Toy training set: the first two columns are features, the last column is the class label
T = np.array([[3, 104, -1],
              [2, 100, -1],
              [1, 81, -1],
              [101, 10, 1],
              [99, 5, 1],
              [98, 2, 1]])
# Query point to classify
x = np.array([18, 90])

trainX = T[:, :-1]
trainY = T[:, -1]

Plain KNN classification

# Majority vote among the k nearest neighbours (Euclidean distance)
def knn_class(trainX, trainY, testX, k=5):
    # Distance from the query point to every training sample
    dist = np.sqrt(np.sum(np.square(trainX - testX), axis=1))
    # Labels of the k closest samples
    select = trainY[np.argsort(dist)][:k]
    # Most frequent label; on SciPy >= 1.11 use mode(select, keepdims=True)[0][0]
    return mode(select)[0][0]

knn_class(trainX, trainY, x)
-1
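
As a cross-check, the same prediction can be reproduced with scikit-learn's KNeighborsClassifier; a minimal sketch, where n_neighbors=5 with uniform weights matches the plain majority vote above:

from sklearn.neighbors import KNeighborsClassifier

clf = KNeighborsClassifier(n_neighbors=5)   # Euclidean distance, uniform weights
clf.fit(trainX, trainY)
print(clf.predict([x]))                     # should agree with knn_class and print [-1]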

Weighted KNN classification

# Each neighbour votes with weight 1 / distance; the class with the largest total weight wins
def knn_weight_class(trainX, trainY, testX, k=5):
    # Inverse Euclidean distance as the weight (closer neighbours weigh more)
    weight = np.sqrt(np.sum(np.square(trainX - testX), axis=1)) ** -1
    # Indices of the k largest weights, i.e. the k nearest neighbours
    sort_arg = np.argsort(weight)[-k:]
    # Total weight per class; predict the class with the largest sum
    weight_sum = [weight[sort_arg][trainY[sort_arg] == i].sum() for i in np.unique(trainY)]
    return np.unique(trainY)[np.argmax(weight_sum)]

knn_weight_class(trainX, trainY, x)
-1
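
The distance-weighted vote corresponds to scikit-learn's weights='distance' option; a sketch under the same assumptions as the previous comparison:

clf_w = KNeighborsClassifier(n_neighbors=5, weights='distance')
clf_w.fit(trainX, trainY)
print(clf_w.predict([x]))   # should again predict -1 on this toy set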
# Boston housing data for the regression examples
boston = load_boston()
x = boston.data
y = boston.target

# Hold out the last sample as a single test point
trainX = x[:-1]
trainY = y[:-1]
testX = x[-1]
testY = y[-1]
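
Since load_boston was removed in scikit-learn 1.2, on newer versions the same data can be pulled from the original source, as the scikit-learn deprecation notice suggested; a sketch, assuming the CMU StatLib copy (two physical rows per record) is still available at that URL:

import pandas as pd

data_url = "http://lib.stat.cmu.edu/datasets/boston"
raw_df = pd.read_csv(data_url, sep=r"\s+", skiprows=22, header=None)
x = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])   # 13 feature columns
y = raw_df.values[1::2, 2]                                        # median house value (target)

The same train/test split then applies unchanged.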

Plain KNN regression

# Predict the average target of the k nearest neighbours
def knn_regression(trainX, trainY, testX, k=5):
    # Euclidean distance from the test point to every training sample
    dist = np.sqrt(np.sum(np.square(trainX - testX), axis=1))
    # Targets of the k closest samples
    select = trainY[np.argsort(dist)][:k]
    return np.mean(select)

pred = knn_regression(trainX, trainY, testX)
print(pred, testY)
23.759999999999998 11.9
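
For reference, scikit-learn's KNeighborsRegressor with uniform weights computes the same unweighted mean; a minimal sketch:

from sklearn.neighbors import KNeighborsRegressor

reg = KNeighborsRegressor(n_neighbors=5)   # uniform weights = plain average
reg.fit(trainX, trainY)
print(reg.predict([testX]), testY)         # should match knn_regression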

Weighted KNN regression

# Predict a weighted average of the neighbours' targets, with weight 1 / distance
def knn_weight_regression(trainX, trainY, testX, k=5):
    # Inverse Euclidean distance as the weight
    weight = np.sqrt(np.sum(np.square(trainX - testX), axis=1)) ** -1
    # Indices of the k nearest neighbours (largest weights)
    sort_arg = np.argsort(weight)[-k:]

    # Normalising constant: 1 / (sum of the selected weights)
    a = weight[sort_arg].sum() ** -1
    # Weighted sum of the selected targets
    res = np.sum(trainY[sort_arg] * weight[sort_arg])

    return res * a

pred = knn_weight_regression(trainX, trainY, testX)
print(pred, testY)

23.112713355853334 11.9
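
This mirrors KNeighborsRegressor with weights='distance'; a sketch for comparison:

reg_w = KNeighborsRegressor(n_neighbors=5, weights='distance')
reg_w.fit(trainX, trainY)
print(reg_w.predict([testX]), testY)   # should agree with knn_weight_regression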