import math
import numpy
import funcSet
def getLoss(errOut, corOut):
    """Return the total squared-error loss: 0.5 * sum((errOut - corOut)**2)."""
    diff = errOut - corOut
    return 0.5 * numpy.sum(diff * diff)
class NeuralLine:
    """One fully connected layer, stored as a node of a doubly linked list.

    The weight matrix is (inputLen, hiddenLen) and a batch of samples is a
    row-major matrix, so an entire training set can be pushed through in a
    single vectorized numpy call.
    """

    def __init__(self, inputLen, hiddenLen, activeFunc, studyRate):
        # Weights and bias row; the bias broadcasts across the batch axis.
        self.matrix = numpy.random.rand(inputLen, hiddenLen)
        self.inherent = numpy.zeros((1, hiddenLen))
        self.activeFunc = activeFunc
        self.studyRate = studyRate
        # Cached forward activations and the loss gradient w.r.t. them.
        self.outputs = None
        self.d_loss_ho = None
        # Neighbouring layers in the network's linked list.
        self.next = None
        self.pre = None

    def input(self, inputs):
        """Forward pass: affine transform, then the layer's activation."""
        pre_activation = numpy.dot(inputs, self.matrix) + self.inherent
        self.outputs = self.activeFunc.func(pre_activation)

    def update(self, inputs):
        """Apply one gradient-descent step using the stored d_loss_ho."""
        # Chain rule through the activation; the derivative is evaluated on
        # the cached post-activation outputs (as the funcSet API expects).
        d_loss_h = self.d_loss_ho * self.activeFunc.derivativeFunc(self.outputs)
        # Bias gradient: sum the per-sample gradients over the batch axis.
        self.inherent -= self.studyRate * numpy.sum(d_loss_h, axis=0, keepdims=True)
        # Weight gradient: inputs.T (in, batch) dot d_loss_h (batch, hidden)
        # lands exactly on the (in, hidden) shape of the weight matrix.
        self.matrix -= self.studyRate * numpy.dot(inputs.T, d_loss_h)

    def backpropagation(self, d_loss_ho):
        """Store this layer's output gradient; return the previous layer's.

        (batch, hidden) dot (hidden, in) -> (batch, in), i.e. the loss
        gradient with respect to this layer's inputs.
        """
        self.d_loss_ho = d_loss_ho
        return numpy.dot(d_loss_ho, self.matrix.T)
class NeuralNet:
    """A chain of NeuralLine layers trained by plain batch gradient descent."""

    def __init__(self, inputLen, neuralLenArray, funcArray, studyRateArray):
        """Build the layer chain as a doubly linked list.

        neuralLenArray[i] is layer i's width; funcArray and studyRateArray
        give each layer's activation set and learning rate.
        """
        self.netHead = NeuralLine(inputLen, neuralLenArray[0], funcArray[0], studyRateArray[0])
        self.netEnd = self.netHead
        for i in range(1, len(neuralLenArray)):
            self.netEnd.next = NeuralLine(neuralLenArray[i - 1], neuralLenArray[i],
                                          funcArray[i], studyRateArray[i])
            self.netEnd.next.pre = self.netEnd
            self.netEnd = self.netEnd.next

    def input(self, inputs):
        """Forward-propagate a batch through every layer, head to end."""
        temp = self.netHead
        temp.input(inputs)
        temp = temp.next
        while temp is not None:
            temp.input(temp.pre.outputs)
            temp = temp.next

    def train(self, inputs, outputs, maxIter=10000, tolerance=1e-10):
        """Run up to maxIter epochs, stopping early once loss < tolerance.

        maxIter and tolerance default to the previously hard-coded values
        (10000 and 1e-10), so existing callers behave identically.
        """
        circuCount = 0
        cost = float('inf')  # defined even if maxIter <= 0
        while circuCount < maxIter:
            self.setTrain(inputs, outputs)
            cost = getLoss(self.netEnd.outputs, outputs)
            circuCount += 1
            if cost < tolerance:
                break
        print('circuCount = %d, cost = ' % circuCount + str(cost))

    def setTrain(self, inputs, outputs):
        """One epoch: forward pass, backpropagate gradients, then update weights."""
        self.input(inputs)
        # Gradient of the squared-error loss w.r.t. the network output.
        d_loss_o = self.netEnd.outputs - outputs
        # Walk backwards, each layer converting its output gradient into the
        # gradient for the layer before it.
        temp = self.netEnd
        d_loss_o = temp.backpropagation(d_loss_o)
        temp = temp.pre
        while temp is not None:
            d_loss_o = temp.backpropagation(d_loss_o)
            temp = temp.pre
        # Weights are updated only after all gradients are computed, so every
        # layer's backprop used the pre-update weights.
        temp = self.netHead
        temp.update(inputs)
        temp = temp.next
        while temp is not None:
            temp.update(temp.pre.outputs)
            temp = temp.next
def test():
    """Train a 2-input, 4-hidden, 1-output network on XOR and print its outputs."""
    samples = numpy.array([[0, 1], [1, 0], [0, 0], [1, 1]])
    targets = numpy.array([[1], [1], [0], [0]])
    layer_sizes = [4, len(targets[0])]
    activations = [funcSet.reLuSet, funcSet.straightSet]
    learning_rates = [0.1, 0.1]
    net = NeuralNet(len(samples[0]), layer_sizes, activations, learning_rates)
    net.train(samples, targets)
    print(net.netEnd.outputs)
# Script entry point: run the XOR demo when executed directly.
if __name__ == '__main__':
    test()
# Note: this NeuralNet accepts the whole training set as one batch; an earlier
# version trained one sample at a time, which failed to exploit numpy's
# vectorized (SIMD-style) batch processing.