径向基神经网络python代码(梯度更新)

基础内容详见:《径向基网络函数(最小二乘法)python实现》(CSDN博客)

python代码:

# ---------------------------------------------------------------------------
# Script setup: imports, Chinese font configuration, Iris data loading, and
# the hyperparameters used by the RBF network defined below.
# ---------------------------------------------------------------------------
import time

import numpy as np
import matplotlib.pyplot as plt
from pylab import mpl
from scipy.spatial.distance import cdist
from sklearn.cluster import KMeans
from sklearn.datasets import load_iris
from sklearn.metrics import precision_score, recall_score, accuracy_score, f1_score
from sklearn.model_selection import train_test_split

start = time.perf_counter()  # wall-clock start for the running-time report

# Use a font that can render the Chinese axis labels on the error plot.
mpl.rcParams["font.sans-serif"] = ["SimHei"]

# --- Load and split the Iris data set ---
iris = load_iris()
X = np.array(iris.data)    # feature matrix, shape (150, 4)
Y = np.array(iris.target)  # class labels 0/1/2
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.4, random_state=10)

# BUG FIX: the original assigned shape[0] to input_idm ("input nodes") and
# shape[1] to Data_num ("sample count") — those are swapped: shape[0] is the
# number of samples and shape[1] the number of features. Neither value is
# used in the computation (RBF stores input_idm but never reads it), so the
# correction is behavior-safe.
input_idm = X_train.shape[1]   # number of input nodes (features per sample)
Data_num = X_train.shape[0]    # number of training samples
center_idm = 5                 # number of RBF centers (k-means clusters)
output_idm = 1                 # number of output nodes
r = 0.001                      # learning rate
iters = 20000                  # number of gradient-update iterations

class RBF:
    """Radial-basis-function network trained with gradient updates.

    Centers and widths are initialised by k-means on the training data, then
    centers, widths and output weights are all adjusted iteratively from the
    output error.
    """

    def __init__(self, input_idm, center_idm, output_idm, r, iters):
        self.input_idm = input_idm    # number of input nodes (stored, not used in computation)
        self.center_idm = center_idm  # number of RBF centers (hidden nodes)
        self.output_idm = output_idm  # number of output nodes
        self.centers = None           # (center_idm, n_features); set by C_S()
        self.W = np.random.random((self.center_idm, self.output_idm))  # hidden->output weights
        self.sigma = None             # (center_idm,) RBF widths; set by C_S()
        self.r = r                    # learning rate
        self.iters = iters            # number of training iterations

    def C_S(self, X):
        """Initialise cluster centers and widths (sigma) from the data X."""
        kmeans = KMeans(n_clusters=self.center_idm)
        kmeans.fit(X)  # find a suitable set of center positions
        self.centers = kmeans.cluster_centers_
        # Width of each basis function: std. deviation of the distances from
        # every sample to that center.
        distances = cdist(X, self.centers)
        self.sigma = np.std(distances, axis=0)
        print("聚类中心形状:", self.centers.shape)
        print("标准差形状:", self.sigma.shape)

    def run(self, X, Y):
        """Train on (X, Y) for self.iters iterations.

        Returns the list of per-iteration error norms (for plotting).
        """
        self.C_S(X)
        Errors = []
        for i in range(self.iters):
            distance = cdist(X, self.centers)                       # (n_samples, centers)
            gauss_distance = np.exp(-distance ** 2 / (2 * self.sigma ** 2))
            Y_pred = np.dot(self.W.T, gauss_distance.T)             # (out, n_samples)
            Error = Y.T - Y_pred
            Errors.append(np.linalg.norm(Error))
            # BUG FIX: the original looped over the module-level global
            # `center_idm` instead of the instance attribute.
            for j in range(self.center_idm):
                GradC = np.dot((np.dot(self.W[j], Error * gauss_distance.T[j]) / (self.sigma[j] ** 2)), (X - self.centers[j]))
                GradS = np.dot(np.dot(self.W[j], Error * gauss_distance.T[j]) / (self.sigma[j] ** 3), distance.T[j] ** 2)
                GradW = np.dot(Error, gauss_distance.T[j])
                self.centers[j] = self.centers[j] + self.r * GradC
                self.sigma[j] = self.sigma[j] + self.r * GradS
                # NOTE(review): this updates the ENTIRE weight matrix once per
                # center j (original behavior preserved). Updating only W[j]
                # may be what the derivation intends — confirm before changing.
                self.W = self.W + self.r * GradW
        return Errors

    def evaluation(self, y_test, y_predict):
        """Return (accuracy, precision, recall, f1), macro-averaged."""
        precision = precision_score(y_test, y_predict, average='macro')
        accuracy = accuracy_score(y_test, y_predict)
        recall = recall_score(y_test, y_predict, average='macro')
        # BUG FIX: the original computed the "f1" value with precision_score,
        # so it always equalled precision; it also shadowed the imported
        # f1_score function with a local of the same name.
        f1 = f1_score(y_test, y_predict, average='macro')
        return accuracy, precision, recall, f1

    def predict(self, X):
        """Forward pass: return network output of shape (output_idm, n_samples)."""
        distance = cdist(X, self.centers)
        gauss_distance = np.exp(-distance ** 2 / (2 * self.sigma ** 2))
        Y_pred = np.dot(self.W.T, gauss_distance.T)
        return Y_pred

# ---------------------------------------------------------------------------
# Driver: train the network, evaluate it on the held-out split, report
# metrics and timing, and plot the training-error curve.
# ---------------------------------------------------------------------------
rbf = RBF(input_idm, center_idm, output_idm, r, iters)
Errors = rbf.run(X_train, y_train)
# The network is a single-output regressor; round its output to the nearest
# integer to use it as a class label.
y_pred = np.round(rbf.predict(X_test))
print(y_test.shape)
print(y_pred.reshape(-1).shape)
# BUG FIX: unpack into `f1` so the imported sklearn `f1_score` function is
# not shadowed by the returned value.
accuracy, precision, recall, f1 = rbf.evaluation(y_test, y_pred.reshape(-1))
print("accuracy:", accuracy)
print(" precision:", precision)
print(" recall:", recall)
print(" f1_score:", f1)
end = time.perf_counter()
print('Running time: %s Seconds' % (end - start))
plt.plot(Errors)
plt.xlabel("迭代次数")
plt.ylabel("误差")
plt.show()

数据来源:Iris - UCI 机器学习存储库

运行结果:

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值