Python implementation of a perceptron (Perception)

Training the perceptron model

The dataset generation is covered in the link below:
Generating the double-moon dataset
We take the double-moon dataset generated in the previous post and feed it to the perceptron model for training. I am too lazy to write up what a perceptron model is here, so I will leave that as a gap and fill it in later.

What is a perceptron?

Updating the weights
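As a rough sketch of the rule that the `fit` method below implements: for each training sample $(x, y)$ with label $y \in \{+1, -1\}$ (and a constant 1 appended to $x$ so the last weight acts as the bias), the weights are left untouched when the prediction $\hat{y} = \mathrm{sign}(w \cdot x)$ is correct and otherwise updated as

$$w \leftarrow w + \eta \,(y - \hat{y})\, x$$

where $\eta$ is the learning rate `l_rate`. Since $y - \hat{y}$ is $\pm 2$ on a mistake, this is the classic perceptron update up to a constant factor.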

Dataset

Training results

[Figure: four panels — the generated double-moon dataset, the test set with the decision line, the misclassified test points, and the misclassification count per epoch]
As the results show, the trained model performs reasonably well, reaching an accuracy of about 97.5% on the test set.

accuracy_rate: 0.975 lost_count: 10 last lost_count: 40
Weights: 0.20136202161906264, 0.9729840644208955

In the figure above, the first panel is the generated double-moon dataset, the second panel shows the test set together with the decision line (decision plane), the third panel plots the misclassified points, and the fourth panel plots the number of misclassifications per epoch. As training continues, the error keeps shrinking.
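The decision line in the second and third panels is read straight off the learned weights: with the bias appended as the last weight, the boundary is $w_0 x_1 + w_1 x_2 + w_2 = 0$, which rearranges to

$$x_2 = -\frac{w_0}{w_1}\, x_1 - \frac{w_2}{w_1}$$

i.e. `line_w = -(perception.w[0] / perception.w[1])` and `line_b = -(perception.w[2] / perception.w[1])` in the plotting code.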

Code

import numpy
import matplotlib.pyplot as plt
from half_moonDataSet import Sj_Hal_moonDataSet

class Perception(object):
    def __init__(self, w_dim, epoch=10, l_rate=0.01):
        # w_dim feature weights plus one extra component that acts as the bias
        self.w = numpy.ones(w_dim + 1, dtype=numpy.float32)
        self.epoch = epoch      # number of passes over the training set
        self.l_rate = l_rate    # learning rate
        self.lost = []          # misclassification count recorded per epoch

    def sign(self, y):

        if y >= 0:
            return 1
        else:
            return -1

    def weight_sum(self, x):
        return float(numpy.dot(x, self.w))

    def weight_sum_all(self, X):
        return numpy.dot(X, self.w)

    def fit(self, x_train, x_train_label):
        # append a column of ones so the last weight component acts as the bias
        temp = numpy.ones(len(x_train))
        x_train = numpy.column_stack((x_train, temp))
        del temp

        self.lost.clear()
        loss_temp = 0
        for iter_ in range(self.epoch):
            for i in range(len(x_train)):
                x = x_train[i]
                y = x_train_label[i]
                # SGD: predict with the current weights
                fit_res = self.sign(float(numpy.dot(x, self.w)))
                # print('1w', self.w)
                # print('fit_res:', fit_res, 'y:', y)

                if fit_res != y:
                    loss_temp += 1  # count the misclassifications in this epoch

                    # update the weights
                    # solution 1:
                    self.w = self.w + self.l_rate * (y - fit_res) * x
                    # solution 2:
                    # self.w += x * y * self.l_rate
            self.lost.append(loss_temp)

            if iter_ % 5 == 0:  # print iter_ and loss every 5 epochs
                print('We are training......' + 'iter_:', iter_, 'loss:', loss_temp)

            loss_temp = 0

    def predict(self, x_test):
        temp = numpy.ones(len(x_test))
        x_test = numpy.column_stack((x_test, temp))  # append the bias column, as in fit
        del temp
        # apply the sign function to every weighted sum to get labels in {+1, -1}
        y_predict = list(map(lambda x: 1 if x >= 0 else -1, list(self.weight_sum_all(x_test))))
        return numpy.array(y_predict)

    def score(self, y, label):
        # returns the running accuracy recorded after each correct prediction
        # (its last element is the overall accuracy) and the number of errors
        accuracy = 0
        rate_temp = 0
        rate = []
        for i in range(len(y)):
            if y[i] == label[i]:
                accuracy += 1
                rate_temp = accuracy / len(label)
                rate.append(rate_temp)
        return rate, len(label) - accuracy


if __name__ == '__main__':
    random_seed = 20  # fix the random seed so the generated samples are reproducible
    makeData = Sj_Hal_moonDataSet()
    makeData.random_seed(random_seed)

    np_data, label = makeData.double_moon(origin_y=1, origin_x=1, sample_data=2000, ver_distance=-1, width=1,
                                          hor_distance=3, slope=15)

    Train_x, Train_label, Test_x, Test_label = makeData.moon_train_test_split.train_test_split(sample_set=np_data,
                                                                                               label_set=label,
                                                                                               test_rate=0.2)

    perception = Perception(Train_x.shape[1], epoch=8000, l_rate=0.00001)
    perception.fit(Train_x, Train_label)

    # print('Train_x:\n', Train_x, '\nTrain_label:', Train_label, '\nTest_x:', Test_x, '\nTest_label:', Test_label)

    # evaluate the trained network on the test set
    y_predict = perception.predict(Test_x)
    accuracy_rate, lost_count = perception.score(y_predict, Test_label)
    print('accuracy_rate:', accuracy_rate[-1], 'lost_count:', lost_count, 'last lost_count:', perception.lost[-1])

    pdata_f1 = [np_data[i][0] for i in range(len(np_data)) if label[i] == 1]
    pdata_f2 = [np_data[i][1] for i in range(len(np_data)) if label[i] == 1]
    ndata_f1 = [np_data[i][0] for i in range(len(np_data)) if label[i] == -1]
    ndata_f2 = [np_data[i][1] for i in range(len(np_data)) if label[i] == -1]

    positive_f1 = [Test_x[i][0] for i in range(len(Test_x)) if Test_label[i] == 1]
    positive_f2 = [Test_x[i][1] for i in range(len(Test_x)) if Test_label[i] == 1]
    negetive_f1 = [Test_x[i][0] for i in range(len(Test_x)) if Test_label[i] == -1]
    negetive_f2 = [Test_x[i][1] for i in range(len(Test_x)) if Test_label[i] == -1]

    mistake_f1_pre = [Test_x[i][0] for i in range(len(Test_x)) if y_predict[i] != Test_label[i]]
    mistake_f2_pre = [Test_x[i][1] for i in range(len(Test_x)) if y_predict[i] != Test_label[i]]

    fig = plt.figure(num=1, figsize=(15, 5))

    ax0 = fig.add_subplot(141)
    ax0.scatter(pdata_f1, pdata_f2, color='red', alpha=0.5)
    ax0.scatter(ndata_f1, ndata_f2, color='blue', alpha=0.5)

    ax1 = fig.add_subplot(142)
    ax1.scatter(positive_f1, positive_f2, c='red', alpha=0.5)
    ax1.scatter(negetive_f1, negetive_f2, c='blue', alpha=0.5)

    line_x = numpy.linspace(-6, 15, 100)

    # decision line: w[0]*x1 + w[1]*x2 + w[2] = 0  =>  x2 = -(w[0]/w[1])*x1 - w[2]/w[1]
    line_w = -1 * (perception.w[0] / perception.w[1])
    line_b = -1 * (float(perception.w[2]) / perception.w[1])
    print('\n', line_w, line_b)

    line_y = list(map(lambda x: x * line_w + line_b, line_x))

    ax1.plot(line_x, line_y, c='orange')

    ax2 = fig.add_subplot(143)

    ax2.scatter(mistake_f1_pre, mistake_f2_pre, c='orange', alpha=0.5)
    ax2.plot(line_x, line_y, c='orange')

    ax3 = fig.add_subplot(144)
    ax3.plot(range(0, len(perception.lost)), perception.lost)
    # print('\nlost:', perception.lost, 'lost len:', len(perception.lost))

    plt.show()
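If the `half_moonDataSet` module from the previous post is not at hand, the sketch below (an assumed stand-in, not the original setup) drives the same `Perception` class with scikit-learn's `make_moons` instead. The generated moons interleave differently from the custom double-moon generator, so the numbers will not match the results shown above.

import numpy
from sklearn.datasets import make_moons              # stand-in for Sj_Hal_moonDataSet
from sklearn.model_selection import train_test_split

# build a two-moon dataset and map its labels {0, 1} -> {-1, +1}, as Perception expects
X, y = make_moons(n_samples=2000, noise=0.1, random_state=20)
y = numpy.where(y == 1, 1, -1)

Train_x, Test_x, Train_label, Test_label = train_test_split(X, y, test_size=0.2, random_state=20)

perception = Perception(Train_x.shape[1], epoch=200, l_rate=0.001)
perception.fit(Train_x, Train_label)

y_predict = perception.predict(Test_x)
rate, lost_count = perception.score(y_predict, Test_label)
print('accuracy_rate:', rate[-1], 'lost_count:', lost_count)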

Reference blog:
https://blog.csdn.net/qq_40454401/article/details/121352000?spm=1001.2014.3001.5501
