# Commonalities

• Both the perceptron and Adaline are models for linear binary classification
• Both apply a threshold function to decide which class an example belongs to (a one-line sketch follows this list)

In the figure, $\theta$ is the threshold.

• Both learn iteratively, updating their parameters step by step
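As a minimal sketch of that shared decision rule (in both implementations below the threshold is folded into the weights as a bias term, so the comparison is against 0; the sample values are illustrative):

```python
import numpy as np

def decision(z, theta=0.0):
    # Unit step: class 1 once the net input z reaches the threshold theta,
    # class -1 otherwise
    return np.where(z >= theta, 1, -1)

print(decision(np.array([-0.3, 0.0, 0.7])))  # [-1  1  1]
```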

# Differences

• Adaline processes the input one extra time with an activation function and uses the resulting error to update its parameters, whereas the perceptron updates its parameters directly from the thresholded classification result (as the sketch below illustrates)
• Because its error is continuous, Adaline can measure how far a prediction deviates from the target and adjust the parameters in proportion, which generally gives it better performance than the perceptron

In the figure, the quantizer thresholds the continuous output of the activation function to produce the binary class label.
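A minimal sketch of that difference on a single training example; the weights `w`, the example `xi`, and its label `target` are made-up values for illustration:

```python
import numpy as np

w = np.array([0.0, 0.5, -0.2])   # hypothetical weights, w[0] is the bias
xi = np.array([1.0, 2.0])        # one training example
target = 1                       # its class label, in {-1, 1}

net_input = np.dot(xi, w[1:]) + w[0]

# Perceptron: the error is taken AFTER thresholding, so it is always one
# of {-2, 0, 2} and ignores how close the example was to the boundary
perceptron_error = target - np.where(net_input >= 0.0, 1, -1)

# Adaline: the error is taken BEFORE thresholding, so it is continuous
# and grows with how far the output deviates from the target
adaline_error = target - net_input

print(perceptron_error, adaline_error)  # 0 0.9
```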

# Code Demo

## Perceptron

```python
import numpy as np


class Perceptron(object):
    # Initialize with learning rate, number of epochs and random seed
    def __init__(self, eta=0.1, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        # Seeded random number generator for reproducible weight initialization
        rgen = np.random.RandomState(self.random_state)

        # Draw the initial weight vector from a narrow Gaussian distribution
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])

        # Track misclassifications per epoch for evaluating the model
        self.errors_ = []

        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                # Calculate the update from the thresholded prediction
                update = self.eta * (target - self.predict(xi))

                # Update the bias unit
                self.w_[0] += update

                # Update the weight vector w_
                self.w_[1:] += update * xi

                errors += int(update != 0.0)

            # Record the misclassification count for this epoch
            self.errors_.append(errors)

        return self

    def predict(self, X):
        # Return the class label after applying the unit step function
        return np.where(self.net_input(X) >= 0.0, 1, -1)

    def net_input(self, X):
        # Compute the net input: dot product of X and the weights, plus bias
        return np.dot(X, self.w_[1:]) + self.w_[0]
```


Here `target` is the class label of a training example; with this implementation of `predict` it takes the value 1 or −1.
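A hypothetical usage sketch; the toy dataset below is made up for illustration (two features, labels in {1, −1}):

```python
import numpy as np

# Made-up linearly separable data: positive class upper right,
# negative class lower left
X = np.array([[2.0, 1.0], [1.5, 2.0], [3.0, 4.0],
              [-1.0, -2.0], [-2.0, -1.0], [-3.0, -2.5]])
y = np.array([1, 1, 1, -1, -1, -1])

ppn = Perceptron(eta=0.1, n_iter=10, random_state=1)
ppn.fit(X, y)

print(ppn.errors_)     # misclassifications per epoch, reaching 0 on convergence
print(ppn.predict(X))  # should reproduce y once converged
```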

## Adaline

```python
import numpy as np


class AdalineGD(object):
    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        # Randomly initialize the weights from a narrow Gaussian distribution
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []

        for i in range(self.n_iter):
            net_input = self.net_input(X)
            output = self.activation(net_input)
            # Continuous error: difference between labels and activation output
            errors = y - output
            # Full-batch gradient descent step on the weights and the bias
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            # Sum-of-squared-errors cost, logged once per epoch
            cost = (errors**2).sum() / 2.0
            self.cost_.append(cost)
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    # The activation function here is just the identity for Adaline
    def activation(self, X):
        return X

    def predict(self, X):
        # Quantizer: threshold the continuous activation into a class label
        return np.where(self.activation(self.net_input(X)) >= 0.0, 1, -1)
```
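And a matching usage sketch for `AdalineGD` on the same made-up data; standardizing the features first is a common companion step, since full-batch gradient descent with a single learning rate is sensitive to feature scale:

```python
import numpy as np

X = np.array([[2.0, 1.0], [1.5, 2.0], [3.0, 4.0],
              [-1.0, -2.0], [-2.0, -1.0], [-3.0, -2.5]])
y = np.array([1, 1, 1, -1, -1, -1])

# Standardize each feature to zero mean and unit variance so one
# learning rate suits every weight
X_std = (X - X.mean(axis=0)) / X.std(axis=0)

ada = AdalineGD(eta=0.01, n_iter=50, random_state=1)
ada.fit(X_std, y)

print(ada.cost_[0], ada.cost_[-1])  # the SSE cost should shrink across epochs
print(ada.predict(X_std))           # should reproduce y
```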

