我想用神经网络来做整数/浮点数的加法,输出值大于或等于 0。我不使用 sigmoid 函数,而是使用 ReLU 函数。
我用 Python 和 numpy 实现,并在神经网络中使用 ReLU 函数。我目前的代码如下:from numpy import exp, array, random, dot
class NeuralNetwork():
def __init__(self):
random.seed(1)
# setting the number of nodes in layer 2 and layer 3
# more nodes --> more confidence in predictions (?)
l2 = 5
l3 = 4
# assign random weights to matrices in network
# format is (no. of nodes in previous layer) x (no. of nodes in following layer)
self.synaptic_weights1 = 2 * random.random((3, l2)) -1
self.synaptic_weights2 = 2 * random.random((l2, l3)) -1
self.synaptic_weights3 = 2 * random.random((l3, 1)) -1
def ReLU(self, x):
    """Rectified linear unit: element-wise max(0, x).

    Works on scalars and numpy arrays alike. Unlike sigmoid, ReLU is
    unbounded above, so the network can output values >= 0 of any size.
    """
    # (x > 0) is a 0/1 mask that zeroes out negative entries.
    # The original's abs() was redundant: for x > 0, abs(x) == x.
    return x * (x > 0)
# derivative of sigmoid function, indic