实现得比较粗糙，代码如下：
import numpy as np


class Perceptron:
    """Primal-form perceptron classifier (Li Hang, "Statistical Learning
    Methods", Algorithm 2.1).

    Learns a separating hyperplane w·x + b = 0 by repeatedly sweeping the
    training set and nudging (w, b) toward each misclassified point until
    every point is classified correctly or the epoch budget runs out.
    """

    def __init__(self, w=0, b=0, lr=1, epoch=100):
        """Initialize the model parameters.

        w:     initial weight — a vector, or a scalar that is broadcast to
               the feature dimension on the first call to train().
        b:     initial bias.
        lr:    learning rate (step size for each update).
        epoch: maximum number of full passes over the training data.
        """
        # Store the weight as a float ndarray: with a plain list,
        # `weight += lr*y*x` would call list.__iadd__ and *extend* the
        # list with the ndarray's elements instead of adding vectors.
        self.weight = np.atleast_1d(np.asarray(w, dtype=float))
        self.bias = b
        self.lr = lr  # lr: learning rate
        self.epoch = epoch

    def sign(self, x):
        """Return +1 if w·x + b >= 0, else -1 (never 0)."""
        if np.dot(self.weight, x) + self.bias >= 0:
            return 1
        else:
            return -1

    def train(self, X, y):
        """Fit (weight, bias) on samples X with labels y in {+1, -1}.

        Stops as soon as a full pass makes no update (the data is
        separated) or after self.epoch passes.
        """
        X_train = np.asarray(X, dtype=float)
        n_features = X_train.shape[1]
        # Generalize the scalar default w=0: broadcast it to one
        # component per feature so sign() gets a matching vector.
        if self.weight.size == 1 and n_features != 1:
            self.weight = np.full(n_features, self.weight[0])
        convergent = False
        epoch = 0
        while not convergent and epoch < self.epoch:
            convergent = True
            for x_i, y_i in zip(X_train, y):
                # sign() never returns 0, so <= 0 means misclassified.
                if y_i * self.sign(x_i) <= 0:
                    convergent = False
                    self.weight += self.lr * y_i * x_i
                    self.bias += self.lr * y_i
            epoch += 1

    def test(self, X):
        """Return the predicted labels (+1/-1) for each row of X."""
        X_test = np.asarray(X, dtype=float)
        y_test = np.zeros(X_test.shape[0])
        for i in range(X_test.shape[0]):
            y_test[i] = self.sign(X_test[i])
        return y_test

    def print_attribute(self):
        """Print the learned weight vector and bias."""
        print("weight:", self.weight)
        print("bias:", self.bias)

    # Backward-compatible alias for the original (misspelled) name.
    def print_arribute(self):
        self.print_attribute()
用书上例2.1测试一下结果:
# Book example 2.1: three points in R^2, the last one negatively labeled.
X = [[3, 3], [4, 3], [1, 1]]
y = [1, 1, -1]

# Start from weight [0, 0] and bias 0, fit on the data,
# then report the parameters the perceptron learned.
perceptron = Perceptron([0, 0], 0)
perceptron.train(X, y)
perceptron.print_arribute()
结果如下
('weight:', array([1, 1]))
('bias:', -3)
尝试一下训练集的结果
perceptron.test(X)
array([ 1., 1., -1.])