Sigmoid function
Formula: sigmoid(x) = 1 / (1 + exp(-x))
Gradient: sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
Tanh function
Formula: tanh(x) = 2 / (1 + exp(-2x)) - 1
Gradient: tanh'(x) = 1 - tanh(x)^2
ReLU function
Formula: relu(x) = max(0, x)
Gradient: relu'(x) = 1 if x > 0, else 0
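These gradients can be verified numerically: a centered finite difference should agree with the analytic derivative at any test point away from ReLU's kink at 0. A minimal sketch, assuming only NumPy; the helper name numeric_grad is illustrative and not part of the original code:

import numpy as np

def numeric_grad(f, x, h=1e-5):
    # centered finite difference: (f(x+h) - f(x-h)) / (2h)
    return (f(x + h) - f(x - h)) / (2.0 * h)

sigmoid = lambda x: 1.0 / (1.0 + np.exp(-x))
tanh = lambda x: 2.0 / (1.0 + np.exp(-2 * x)) - 1.0
relu = lambda x: np.maximum(x, 0.0)

x0 = 0.7  # any point away from ReLU's kink at 0
print(numeric_grad(sigmoid, x0), sigmoid(x0) * (1 - sigmoid(x0)))  # the two values agree
print(numeric_grad(tanh, x0), 1 - tanh(x0) ** 2)                   # the two values agree
print(numeric_grad(relu, x0), 1.0 if x0 > 0 else 0.0)              # both 1.0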
import numpy as np
import matplotlib.pyplot as plt
try:
    # setChinese is a local helper module that configures Chinese fonts for matplotlib;
    # it is optional here because the plot labels below are in English.
    import setChinese as setCh
    setCh.set_ch()
except ImportError:
    pass
"""sigmoid函数也叫logistics函数 取值范围(0,1)
它能够将任何实数映射到(0,1)区间上。用来做二分类
公式:1/(1+exp(-x))"""
class SigmoidActivator(object):
    def sigmoid(self, x):
        # forward pass: 1 / (1 + exp(-x))
        return 1.0 / (1.0 + np.exp(-x))

    def sigmoid_derivative(self, x):
        # gradient: sigmoid(x) * (1 - sigmoid(x))
        return self.sigmoid(x) * (1 - self.sigmoid(x))

    def sigmoid_graph(self):
        # plot the function and its derivative on the same axes
        x = np.arange(-8, 8, 0.1)
        y = self.sigmoid(x)
        y_derivative = self.sigmoid_derivative(x)
        plt.plot(x, y, 'g:')
        plt.plot(x, y_derivative, 'r-')
        plt.title("Sigmoid and its derivative (forward / backward)")
        plt.legend(['sigmoid', 'sigmoid_derivative'])
        plt.show()
class TanhActivator(object):
    def tanh(self, x):
        # forward pass: 2 / (1 + exp(-2x)) - 1, equivalent to np.tanh(x)
        return 2.0 / (1 + np.exp(-2 * x)) - 1.0

    def tanh_derivative(self, x):
        # gradient: 1 - tanh(x)^2
        return 1 - self.tanh(x) * self.tanh(x)

    def tanh_graph(self):
        x = np.arange(-8, 8, 0.1)
        y = self.tanh(x)
        y_derivative = self.tanh_derivative(x)
        plt.plot(x, y, 'g:')
        plt.plot(x, y_derivative, 'r-')
        plt.title("Tanh and its derivative (forward / backward)")
        plt.legend(['tanh', 'tanh_derivative'])
        plt.show()
class ReluActivator(object):
    def Relu(self, x):
        # forward pass: max(0, x)
        return np.maximum(x, 0.0)

    def Relu_derivative(self, x):
        # gradient: 1 where x > 0, 0 elsewhere
        return np.where(x > 0, 1, 0)

    def Relu_graph(self):
        x = np.arange(-8, 8, 0.1)
        y = self.Relu(x)
        y_derivative = self.Relu_derivative(x)
        plt.plot(x, y, 'g:')
        plt.plot(x, y_derivative, 'r-')
        plt.title("ReLU and its derivative (forward / backward)")
        plt.legend(['Relu', 'Relu_derivative'])
        plt.show()
if __name__ == '__main__':
    x = 3
    sig = SigmoidActivator()
    print(sig.sigmoid(x))             # ~0.9526
    print(sig.sigmoid_derivative(x))  # ~0.0452
    sig.sigmoid_graph()

    tanh = TanhActivator()
    print(tanh.tanh(x))
    print(tanh.tanh_derivative(x))
    tanh.tanh_graph()

    relu = ReluActivator()
    print(relu.Relu(x))               # 3.0
    print(relu.Relu_derivative(x))    # 1
    relu.Relu_graph()
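As noted in the docstring above, the sigmoid output lies in (0, 1) and can be read as a probability for binary classification. Below is a minimal sketch of that idea, assuming a toy 1-D dataset; the names X, y, w, b and the learning rate are illustrative only and not part of the original code:

import numpy as np

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))  # same formula as SigmoidActivator.sigmoid

# toy 1-D data: negative inputs labeled 0, positive inputs labeled 1
X = np.array([-2.0, -1.0, -0.5, 0.5, 1.0, 2.0])
y = np.array([0.0, 0.0, 0.0, 1.0, 1.0, 1.0])

w, b, lr = 0.0, 0.0, 0.5
for _ in range(200):
    p = sigmoid(w * X + b)          # predicted P(y = 1), always in (0, 1)
    grad_w = np.mean((p - y) * X)   # cross-entropy gradient w.r.t. w
    grad_b = np.mean(p - y)         # cross-entropy gradient w.r.t. b
    w -= lr * grad_w
    b -= lr * grad_b

print(np.round(sigmoid(w * X + b), 2))  # probabilities move toward the 0/1 labels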